blob: f554b86d4064cc2d1ba25df2982e62fce455a2e4 [file] [log] [blame]
Richard Henderson951c6302014-09-19 11:39:20 -07001/*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24
Peter Maydell757e7252016-01-26 18:17:08 +000025#include "qemu/osdep.h"
Richard Henderson951c6302014-09-19 11:39:20 -070026#include "tcg.h"
27#include "tcg-op.h"
28
Richard Henderson3a13c3f2014-09-19 11:41:05 -070029/* Reduce the number of ifdefs below. This assumes that all uses of
30 TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
31 the compiler can eliminate. */
32#if TCG_TARGET_REG_BITS == 64
33extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
34extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
35#define TCGV_LOW TCGV_LOW_link_error
36#define TCGV_HIGH TCGV_HIGH_link_error
37#endif
Richard Henderson951c6302014-09-19 11:39:20 -070038
Richard Hendersonc45cb8b2014-09-19 13:49:15 -070039/* Note that this is optimized for sequential allocation during translate.
40 Up to and including filling in the forward link immediately. We'll do
41 proper termination of the end of the list after we finish translation. */
42
/* Append one op to the context's op buffer.  'args' is the index of the
   op's first argument in gen_opparam_buf.  The forward link is filled in
   speculatively (sequential allocation is the common case); the end of
   the list is properly terminated only after translation finishes.  */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;   /* slot for this op */
    int ni = oi + 1;                 /* speculative forward link */
    int pi = oi - 1;                 /* backward link (-1 for the first op) */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
60
/* Emit an op with one argument: store the argument in the parameter
   buffer, then append the op referencing it.  */
void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}
71
/* Emit an op with two arguments stored consecutively in the
   parameter buffer.  */
void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}
83
/* Emit an op with three arguments stored consecutively in the
   parameter buffer.  */
void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}
97
/* Emit an op with four arguments stored consecutively in the
   parameter buffer.  */
void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}
112
/* Emit an op with five arguments stored consecutively in the
   parameter buffer.  */
void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}
128
/* Emit an op with six arguments stored consecutively in the
   parameter buffer.  */
void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}
145
146/* 32 bit ops */
147
148void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
149{
150 /* some cases can be optimized here */
151 if (arg2 == 0) {
152 tcg_gen_mov_i32(ret, arg1);
153 } else {
154 TCGv_i32 t0 = tcg_const_i32(arg2);
155 tcg_gen_add_i32(ret, arg1, t0);
156 tcg_temp_free_i32(t0);
157 }
158}
159
/* Subtract-from-immediate: ret = arg1 - arg2.  A zero minuend is a
   plain negation when the backend supports it.  */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}
171
172void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
173{
174 /* some cases can be optimized here */
175 if (arg2 == 0) {
176 tcg_gen_mov_i32(ret, arg1);
177 } else {
178 TCGv_i32 t0 = tcg_const_i32(arg2);
179 tcg_gen_sub_i32(ret, arg1, t0);
180 tcg_temp_free_i32(t0);
181 }
182}
183
/* AND with a 32-bit immediate.  Masks of 0 and all-ones fold to a
   constant load / move; byte and halfword masks use native zero-extend
   ops when available.  Everything else materializes the constant.  */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    /* Generic fallthrough path: constant + register AND.  */
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
213
214void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
215{
216 /* Some cases can be optimized here. */
217 if (arg2 == -1) {
218 tcg_gen_movi_i32(ret, -1);
219 } else if (arg2 == 0) {
220 tcg_gen_mov_i32(ret, arg1);
221 } else {
222 TCGv_i32 t0 = tcg_const_i32(arg2);
223 tcg_gen_or_i32(ret, arg1, t0);
224 tcg_temp_free_i32(t0);
225 }
226}
227
/* XOR with a 32-bit immediate.  XOR 0 is a move; XOR -1 is a bitwise
   NOT when the backend has one.  */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
242
243void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
244{
245 tcg_debug_assert(arg2 < 32);
246 if (arg2 == 0) {
247 tcg_gen_mov_i32(ret, arg1);
248 } else {
249 TCGv_i32 t0 = tcg_const_i32(arg2);
250 tcg_gen_shl_i32(ret, arg1, t0);
251 tcg_temp_free_i32(t0);
252 }
253}
254
255void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
256{
257 tcg_debug_assert(arg2 < 32);
258 if (arg2 == 0) {
259 tcg_gen_mov_i32(ret, arg1);
260 } else {
261 TCGv_i32 t0 = tcg_const_i32(arg2);
262 tcg_gen_shr_i32(ret, arg1, t0);
263 tcg_temp_free_i32(t0);
264 }
265}
266
267void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
268{
269 tcg_debug_assert(arg2 < 32);
270 if (arg2 == 0) {
271 tcg_gen_mov_i32(ret, arg1);
272 } else {
273 TCGv_i32 t0 = tcg_const_i32(arg2);
274 tcg_gen_sar_i32(ret, arg1, t0);
275 tcg_temp_free_i32(t0);
276 }
277}
278
/* Conditional branch to label 'l'.  An always-true condition becomes an
   unconditional branch; a never-true condition emits nothing.  */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}
287
/* Conditional branch against an immediate.  The degenerate conditions
   are handled first so no constant temp is allocated for them.  */
void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}
298
299void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
300 TCGv_i32 arg1, TCGv_i32 arg2)
301{
302 if (cond == TCG_COND_ALWAYS) {
303 tcg_gen_movi_i32(ret, 1);
304 } else if (cond == TCG_COND_NEVER) {
305 tcg_gen_movi_i32(ret, 0);
306 } else {
307 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
308 }
309}
310
311void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
312 TCGv_i32 arg1, int32_t arg2)
313{
314 TCGv_i32 t0 = tcg_const_i32(arg2);
315 tcg_gen_setcond_i32(cond, ret, arg1, t0);
316 tcg_temp_free_i32(t0);
317}
318
319void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
320{
321 TCGv_i32 t0 = tcg_const_i32(arg2);
322 tcg_gen_mul_i32(ret, arg1, t0);
323 tcg_temp_free_i32(t0);
324}
325
/* Signed 32-bit division.  Prefers a native div op; falls back to the
   two-output div2 op (which also takes the sign-extended high half of
   the dividend), and finally to a helper call.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign bits of arg1: high half of the 64-bit dividend.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
339
/* Signed 32-bit remainder.  Uses a native rem op, else synthesizes
   arg1 - (arg1 / arg2) * arg2 from div, else the div2 op (remainder
   lands in the second output), else a helper.  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        /* div2 outputs: quotient into t0 (discarded), remainder into ret.  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
359
/* Unsigned 32-bit division.  As tcg_gen_div_i32, but the high half of
   the dividend for the div2 fallback is zero.  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
373
/* Unsigned 32-bit remainder.  Mirrors tcg_gen_rem_i32 with the
   unsigned opcodes and a zero high half for divu2.  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        /* Quotient into t0 (discarded), remainder into ret.  */
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
393
394void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
395{
396 if (TCG_TARGET_HAS_andc_i32) {
397 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
398 } else {
399 TCGv_i32 t0 = tcg_temp_new_i32();
400 tcg_gen_not_i32(t0, arg2);
401 tcg_gen_and_i32(ret, arg1, t0);
402 tcg_temp_free_i32(t0);
403 }
404}
405
406void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
407{
408 if (TCG_TARGET_HAS_eqv_i32) {
409 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
410 } else {
411 tcg_gen_xor_i32(ret, arg1, arg2);
412 tcg_gen_not_i32(ret, ret);
413 }
414}
415
416void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
417{
418 if (TCG_TARGET_HAS_nand_i32) {
419 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
420 } else {
421 tcg_gen_and_i32(ret, arg1, arg2);
422 tcg_gen_not_i32(ret, ret);
423 }
424}
425
426void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
427{
428 if (TCG_TARGET_HAS_nor_i32) {
429 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
430 } else {
431 tcg_gen_or_i32(ret, arg1, arg2);
432 tcg_gen_not_i32(ret, ret);
433 }
434}
435
436void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
437{
438 if (TCG_TARGET_HAS_orc_i32) {
439 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
440 } else {
441 TCGv_i32 t0 = tcg_temp_new_i32();
442 tcg_gen_not_i32(t0, arg2);
443 tcg_gen_or_i32(ret, arg1, t0);
444 tcg_temp_free_i32(t0);
445 }
446}
447
/* Rotate left by a register count: native rot op, or synthesized as
   (arg1 << arg2) | (arg1 >> (32 - arg2)).  NOTE(review): the
   synthesized form relies on the backend's shift semantics when
   arg2 == 0 (shift by 32); presumably TCG guarantees counts are
   taken mod the word size here — confirm against tcg/README.  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
465
/* Rotate left by an immediate count (must be < 32).  Count 0 is a
   move; otherwise use the native rot op, or compose two immediate
   shifts (counts are non-zero and < 32 here, so both are valid).  */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
487
/* Rotate right by a register count: native rot op, or synthesized as
   (arg1 >> arg2) | (arg1 << (32 - arg2)); mirror of tcg_gen_rotl_i32.  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
505
506void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
507{
508 tcg_debug_assert(arg2 < 32);
509 /* some cases can be optimized here */
510 if (arg2 == 0) {
511 tcg_gen_mov_i32(ret, arg1);
512 } else {
513 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
514 }
515}
516
/* Deposit the low 'len' bits of arg2 into arg1 at bit offset 'ofs',
   result in ret.  Uses the native deposit op when the backend accepts
   this (ofs, len) pair; otherwise masks/shifts arg2 into place and
   merges it into arg1 with the field cleared.  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Full-width deposit replaces arg1 entirely.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    /* len < 32 here (the len == 32 case returned above), so the shift
       below is well defined.  */
    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift itself discards the
           bits above 'len', no pre-mask needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
550
/* ret = (c1 cond c2) ? v1 : v2.  Constant conditions fold to a move;
   otherwise use the native movcond, or build a branch-free select:
   mask = -(c1 cond c2); ret = (v1 & mask) | (v2 & ~mask).  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        /* Turn the 0/1 result into an all-zeros/all-ones mask.  */
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
572
/* Double-word add: (rh:rl) = (ah:al) + (bh:bl).  Uses the native add2
   op, or widens both operands to 64 bits, adds, and splits back.  */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
589
/* Double-word subtract: (rh:rl) = (ah:al) - (bh:bl).  Uses the native
   sub2 op, or widens to 64 bits, subtracts, and splits back.  */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
606
/* Unsigned widening multiply: rh:rl = arg1 * arg2 (full 64-bit
   product).  Prefers the native mulu2, then mul + muluh (a temp holds
   the low half in case rl aliases an input), then a 64-bit multiply.  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
628
/* Signed widening multiply: rh:rl = arg1 * arg2.  Prefers native
   muls2, then mul + mulsh.  On 32-bit hosts, computes the unsigned
   product and corrects the high half for negative inputs (standard
   signed-from-unsigned adjustment); otherwise uses a 64-bit multiply
   of sign-extended operands.  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
668
669void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
670{
671 if (TCG_TARGET_HAS_ext8s_i32) {
672 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
673 } else {
674 tcg_gen_shli_i32(ret, arg, 24);
675 tcg_gen_sari_i32(ret, ret, 24);
676 }
677}
678
679void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
680{
681 if (TCG_TARGET_HAS_ext16s_i32) {
682 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
683 } else {
684 tcg_gen_shli_i32(ret, arg, 16);
685 tcg_gen_sari_i32(ret, ret, 16);
686 }
687}
688
689void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
690{
691 if (TCG_TARGET_HAS_ext8u_i32) {
692 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
693 } else {
694 tcg_gen_andi_i32(ret, arg, 0xffu);
695 }
696}
697
698void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
699{
700 if (TCG_TARGET_HAS_ext16u_i32) {
701 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
702 } else {
703 tcg_gen_andi_i32(ret, arg, 0xffffu);
704 }
705}
706
/* Byte-swap the low 16 bits.
   Note: we assume the two high bytes of 'arg' are already zero, so the
   fallback's unmasked right shift cannot leak high bits into the
   result.  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);       /* t0 = low byte */
        tcg_gen_shli_i32(t0, t0, 8);      /* move it up */
        tcg_gen_shri_i32(ret, arg, 8);    /* high byte down */
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
722
/* Byte-swap all 32 bits: native op, or assemble the four bytes with
   shifts and masks (byte 0 -> 3, 1 -> 2, 2 -> 1, 3 -> 0).  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);            /* byte 0 -> byte 3 */

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);    /* byte 1 -> byte 2 */
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);             /* byte 2 -> byte 1 */
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);            /* byte 3 -> byte 0 */
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
748
749/* 64-bit ops */
750
751#if TCG_TARGET_REG_BITS == 32
752/* These are all inline for TCG_TARGET_REG_BITS == 64. */
753
/* Discard a 64-bit value on a 32-bit host: discard both halves.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
759
/* 64-bit move on a 32-bit host: copy both halves.  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
765
/* Load a 64-bit immediate on a 32-bit host: split into two 32-bit
   immediate loads.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
771
/* Load a zero-extended byte into a 64-bit register pair: byte into the
   low half, zero into the high half.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
777
778void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
779{
780 tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
781 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
782}
783
/* Load a zero-extended halfword into a 64-bit register pair.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
789
/* Load a sign-extended halfword into a 64-bit register pair: halfword
   into the low half, its sign bit replicated into the high half.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
795
/* Load a zero-extended 32-bit word into a 64-bit register pair.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
801
/* Load a sign-extended 32-bit word into a 64-bit register pair.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
807
/* Load a full 64-bit value as two 32-bit loads, in host byte order.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
820
/* Store a full 64-bit value as two 32-bit stores, in host byte order.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
831
/* 64-bit AND on a 32-bit host: operate on each half independently.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
837
/* 64-bit OR on a 32-bit host: operate on each half independently.  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
843
/* 64-bit XOR on a 32-bit host: operate on each half independently.  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
849
/* 64-bit variable shift left on a 32-bit host: done out of line.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
854
/* 64-bit variable logical shift right on a 32-bit host: out of line.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
859
/* 64-bit variable arithmetic shift right on a 32-bit host: out of
   line.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
864
/* 64-bit multiply on a 32-bit host, schoolbook style: the full 64-bit
   product of the low halves, plus the two cross products folded into
   the high half (the high*high term only affects bits >= 64 and is
   dropped).  The result is built in a temp so ret may alias an input.  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
885#endif /* TCG_TARGET_REG_SIZE == 32 */
886
887void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
888{
889 /* some cases can be optimized here */
890 if (arg2 == 0) {
891 tcg_gen_mov_i64(ret, arg1);
892 } else {
893 TCGv_i64 t0 = tcg_const_i64(arg2);
894 tcg_gen_add_i64(ret, arg1, t0);
895 tcg_temp_free_i64(t0);
896 }
897}
898
/* Subtract-from-immediate: ret = arg1 - arg2.  A zero minuend is a
   plain negation when the backend supports it.  */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}
910
911void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
912{
913 /* some cases can be optimized here */
914 if (arg2 == 0) {
915 tcg_gen_mov_i64(ret, arg1);
916 } else {
917 TCGv_i64 t0 = tcg_const_i64(arg2);
918 tcg_gen_sub_i64(ret, arg1, t0);
919 tcg_temp_free_i64(t0);
920 }
921}
922
/* AND with a 64-bit immediate.  On 32-bit hosts, split into two 32-bit
   immediate ANDs.  Otherwise fold the trivial masks and use native
   zero-extend ops for byte/halfword/word masks when available.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    /* Generic path: constant + register AND.  */
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
965
/* OR with a 64-bit immediate.  On 32-bit hosts, split into two 32-bit
   immediate ORs.  OR with all-ones yields all-ones; OR with zero is a
   move.  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
984
/* XOR with a 64-bit immediate.  On 32-bit hosts, split into two 32-bit
   immediate XORs.  XOR 0 is a move; XOR -1 is a bitwise NOT when the
   backend has one.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1004
Richard Henderson951c6302014-09-19 11:39:20 -07001005static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
1006 unsigned c, bool right, bool arith)
1007{
1008 tcg_debug_assert(c < 64);
1009 if (c == 0) {
1010 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1011 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1012 } else if (c >= 32) {
1013 c -= 32;
1014 if (right) {
1015 if (arith) {
1016 tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
1017 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
1018 } else {
1019 tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
1020 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1021 }
1022 } else {
1023 tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
1024 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
1025 }
1026 } else {
1027 TCGv_i32 t0, t1;
1028
1029 t0 = tcg_temp_new_i32();
1030 t1 = tcg_temp_new_i32();
1031 if (right) {
1032 tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
1033 if (arith) {
1034 tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
1035 } else {
1036 tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
1037 }
1038 tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
1039 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
1040 tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
1041 } else {
1042 tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
1043 /* Note: ret can be the same as arg1, so we use t1 */
1044 tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
1045 tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
1046 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
1047 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1048 }
1049 tcg_temp_free_i32(t0);
1050 tcg_temp_free_i32(t1);
1051 }
1052}
1053
/* Emit ret = arg1 << arg2 for a constant shift count (must be < 64).  */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: expand via the split-halves helper.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain copy.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1067
/* Emit ret = arg1 >> arg2 (logical) for a constant shift count (< 64).  */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: expand via the split-halves helper.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1081
/* Emit ret = arg1 >> arg2 (arithmetic) for a constant shift count (< 64).  */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: expand via the split-halves helper.  */
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
Richard Henderson951c6302014-09-19 11:39:20 -07001095
Richard Henderson42a268c2015-02-13 12:51:55 -08001096void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
Richard Henderson951c6302014-09-19 11:39:20 -07001097{
1098 if (cond == TCG_COND_ALWAYS) {
Richard Henderson42a268c2015-02-13 12:51:55 -08001099 tcg_gen_br(l);
Richard Henderson951c6302014-09-19 11:39:20 -07001100 } else if (cond != TCG_COND_NEVER) {
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001101 if (TCG_TARGET_REG_BITS == 32) {
1102 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
1103 TCGV_HIGH(arg1), TCGV_LOW(arg2),
Richard Henderson42a268c2015-02-13 12:51:55 -08001104 TCGV_HIGH(arg2), cond, label_arg(l));
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001105 } else {
Richard Henderson42a268c2015-02-13 12:51:55 -08001106 tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
1107 label_arg(l));
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001108 }
Richard Henderson951c6302014-09-19 11:39:20 -07001109 }
1110}
1111
Richard Henderson42a268c2015-02-13 12:51:55 -08001112void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
Richard Henderson951c6302014-09-19 11:39:20 -07001113{
1114 if (cond == TCG_COND_ALWAYS) {
Richard Henderson42a268c2015-02-13 12:51:55 -08001115 tcg_gen_br(l);
Richard Henderson951c6302014-09-19 11:39:20 -07001116 } else if (cond != TCG_COND_NEVER) {
1117 TCGv_i64 t0 = tcg_const_i64(arg2);
Richard Henderson42a268c2015-02-13 12:51:55 -08001118 tcg_gen_brcond_i64(cond, arg1, t0, l);
Richard Henderson951c6302014-09-19 11:39:20 -07001119 tcg_temp_free_i64(t0);
1120 }
1121}
1122
/* Emit ret = (arg1 COND arg2) ? 1 : 0.
   ALWAYS/NEVER fold to constant 1/0 at translate time.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 produces the 0/1 result in the low half;
               the high half of the i64 result is always zero.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1141
1142void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1143 TCGv_i64 arg1, int64_t arg2)
1144{
1145 TCGv_i64 t0 = tcg_const_i64(arg2);
1146 tcg_gen_setcond_i64(cond, ret, arg1, t0);
1147 tcg_temp_free_i64(t0);
1148}
1149
1150void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1151{
1152 TCGv_i64 t0 = tcg_const_i64(arg2);
1153 tcg_gen_mul_i64(ret, arg1, t0);
1154 tcg_temp_free_i64(t0);
1155}
1156
/* Emit signed 64-bit division ret = arg1 / arg2, using the best
   facility the backend offers: a direct div op, a 128/64 div2 op
   (fed with the sign-extension of arg1 as the high half), or a
   runtime helper call.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* t0 = sign bits of arg1: the high half of the dividend.  */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1170
/* Emit signed 64-bit remainder ret = arg1 % arg2.  Falls back from a
   direct rem op, to rem = arg1 - (arg1 / arg2) * arg2 via div, to a
   div2 op that yields the remainder in its second output, to a
   runtime helper.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* t0 = sign-extended high half of the dividend; div2 places
           the quotient in its first output and the remainder in its
           second, hence (t0, ret) here vs (ret, t0) in div.  */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1190
/* Emit unsigned 64-bit division ret = arg1 / arg2.  Uses a direct
   divu op, a 128/64 divu2 op with a zero high half, or a helper.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* Unsigned dividend: high half is zero.  */
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1204
/* Emit unsigned 64-bit remainder ret = arg1 % arg2, with the same
   fallback chain as tcg_gen_rem_i64 but using the unsigned ops.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        /* Zero high half; divu2 yields the remainder in output two.  */
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1224
/* Emit ret = sign-extension of the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then replicate its sign bit
           into the high half.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Shift left/arithmetic-right pair as the generic fallback.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1237
/* Emit ret = sign-extension of the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then replicate its sign bit
           into the high half.  */
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Shift left/arithmetic-right pair as the generic fallback.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1250
/* Emit ret = sign-extension of the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The low half is already the value; fill the high half
           with its sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift left/arithmetic-right pair as the generic fallback.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1263
/* Emit ret = zero-extension of the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* Fallback: mask with 0xff.  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1275
/* Emit ret = zero-extension of the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Fallback: mask with 0xffff.  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1287
/* Emit ret = zero-extension of the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Fallback: mask with 0xffffffff.  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1299
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        /* t0 = low byte moved up; ret = high byte moved down.
           The shift right relies on the precondition above leaving
           no stray bits above bit 15.  */
        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1318
/* Byte-swap the low 32 bits of arg into ret.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Assemble the result byte lane by byte lane in t0.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (ext32u discards bits shifted past 32).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0 (uses the zero-high-bytes precondition).  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1349
/* Byte-swap all 64 bits of arg into ret.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.
           Temporaries allow ret to alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Generic fallback: move each byte lane to its mirror
           position and accumulate in t0.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1401
/* Emit ret = ~arg.  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* Fallback: ~x == x ^ -1.  */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1413
/* Emit ret = arg1 & ~arg2 (and-with-complement).  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: complement into a temporary, then AND.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1428
/* Emit ret = ~(arg1 ^ arg2) (equivalence / xnor).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Fallback: XOR then complement.  */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1441
/* Emit ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Fallback: AND then complement.  */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1454
/* Emit ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Fallback: OR then complement.  */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1467
/* Emit ret = arg1 | ~arg2 (or-with-complement).  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Fallback: complement into a temporary, then OR.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1482
/* Emit ret = arg1 rotated left by arg2 (a runtime value).  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (x << n) | (x >> (64 - n)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1499
/* Emit ret = arg1 rotated left by constant arg2 (must be < 64).  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotate by zero is a plain copy.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Fallback: (x << n) | (x >> (64 - n)); arg2 != 0 here so
           both shift counts are in range.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1521
/* Emit ret = arg1 rotated right by arg2 (a runtime value).  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Fallback: (x >> n) | (x << (64 - n)).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1538
1539void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1540{
1541 tcg_debug_assert(arg2 < 64);
1542 /* some cases can be optimized here */
1543 if (arg2 == 0) {
1544 tcg_gen_mov_i64(ret, arg1);
1545 } else {
1546 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1547 }
1548}
1549
/* Emit ret = arg1 with the LEN-bit field at bit offset OFS replaced
   by the low LEN bits of arg2.  Uses the backend deposit op when it
   supports this (ofs, len) pair; otherwise expands to mask/shift/or.  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        /* Full-width deposit is just a copy of arg2 (also avoids the
           out-of-range 1 << 64 in the mask computation below).  */
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host, a field contained entirely in one half
           can use a single 32-bit deposit on that half.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Field straddles the halves: fall through to the generic
           mask/shift expansion below.  */
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        /* Field does not reach bit 63: mask arg2 before shifting so
           stray high bits of arg2 cannot leak into ret.  */
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field ends at bit 63: the shift itself discards the excess
           high bits, so no pre-masking is needed.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    /* Clear the destination field in arg1 and merge the new bits.  */
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1598
/* Emit ret = (c1 COND c2) ? v1 : v2.
   ALWAYS/NEVER fold to a plain move of v1/v2 at translate time.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* 32-bit host: evaluate the condition once into t0 (0 or 1)
           with setcond2, then select each half.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half with movcond against t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* No movcond: turn the 0/1 flag into an all-zeros/all-ones
               mask and select with (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Same bit-mask select as above, in 64-bit ops.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1646
/* Emit 128-bit addition: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Carry out of the low addition: unsigned (al + bl) < al.  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write rl last so rl may alias the inputs.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1664
/* Emit 128-bit subtraction: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low subtraction: unsigned al < bl.  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write rl last so rl may alias the inputs.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1682
/* Emit unsigned 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Compose from separate low and high-part multiplies; the low
           product goes through a temporary so rl may alias an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* No high-multiply support: compute the high part via helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1701
/* Emit signed 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Compose from separate low and signed-high multiplies; the
           low product uses a temporary so rl may alias an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Derive the signed product from the unsigned one: the low
           64 bits are identical; the high part needs correction.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        /* If argN < 0 it was treated as argN + 2^64 by the unsigned
           multiply, so subtract the other operand from the high part:
           t2 = (arg1 < 0 ? arg2 : 0), t3 = (arg2 < 0 ? arg1 : 0).  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* Compute the high part via helper.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1738
1739/* Size changing operations. */
1740
Richard Henderson609ad702015-07-24 07:16:00 -07001741void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
Richard Henderson951c6302014-09-19 11:39:20 -07001742{
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001743 if (TCG_TARGET_REG_BITS == 32) {
Richard Henderson609ad702015-07-24 07:16:00 -07001744 tcg_gen_mov_i32(ret, TCGV_LOW(arg));
1745 } else if (TCG_TARGET_HAS_extrl_i64_i32) {
1746 tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
1747 GET_TCGV_I32(ret), GET_TCGV_I64(arg));
1748 } else {
Richard Henderson951c6302014-09-19 11:39:20 -07001749 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
Richard Henderson609ad702015-07-24 07:16:00 -07001750 }
1751}
1752
/* Extract the high 32 bits of an i64 into an i32.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The high half already lives in its own i32.  */
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift the high half down, then reinterpret as an i32.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1767
/* Zero-extend an i32 into an i64.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1778
/* Sign-extend an i32 into an i64.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy the value, then fill the high half with its sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1789
/* Build an i64 from two i32 halves: dest = (high << 32) | low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* The i64 is already stored as two i32 halves.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information. */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above. */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1815
/* Split an i64 into its low and high 32-bit parts (lo, hi as i32).  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
1826
/* Split an i64 into two i64s: lo = zero-extended low 32 bits,
   hi = high 32 bits shifted down.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1832
1833/* QEMU specific operations. */
1834
/* Emit a goto_tb opcode so this TB can be chained directly to the
   translation block selected at exit number idx.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1846
1847static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
1848{
1849 switch (op & MO_SIZE) {
1850 case MO_8:
1851 op &= ~MO_BSWAP;
1852 break;
1853 case MO_16:
1854 break;
1855 case MO_32:
1856 if (!is64) {
1857 op &= ~MO_SIGN;
1858 }
1859 break;
1860 case MO_64:
1861 if (!is64) {
1862 tcg_abort();
1863 }
1864 break;
1865 }
1866 if (st) {
1867 op &= ~MO_SIGN;
1868 }
1869 return op;
1870}
1871
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001872static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
1873 TCGMemOp memop, TCGArg idx)
Richard Henderson951c6302014-09-19 11:39:20 -07001874{
Richard Henderson59227d52015-05-12 11:51:44 -07001875 TCGMemOpIdx oi = make_memop_idx(memop, idx);
Richard Henderson951c6302014-09-19 11:39:20 -07001876#if TARGET_LONG_BITS == 32
Richard Henderson59227d52015-05-12 11:51:44 -07001877 tcg_gen_op3i_i32(opc, val, addr, oi);
Richard Henderson951c6302014-09-19 11:39:20 -07001878#else
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001879 if (TCG_TARGET_REG_BITS == 32) {
Richard Henderson59227d52015-05-12 11:51:44 -07001880 tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001881 } else {
Richard Henderson59227d52015-05-12 11:51:44 -07001882 tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001883 }
Richard Henderson951c6302014-09-19 11:39:20 -07001884#endif
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001885}
1886
/* Emit a qemu_ld/st opcode for a 64-bit value.  As with gen_ldst_i32,
   the operand layout depends on the build configuration; here the value
   may also be split into two i32 halves when the host is 32-bit.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* Split value, single 32-bit address operand.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        /* Mixed i64 value / i32 address: use the generic emitter.  */
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address split into i32 halves.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
Richard Henderson951c6302014-09-19 11:39:20 -07001906
1907void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1908{
1909 memop = tcg_canonicalize_memop(memop, 0, 0);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001910 gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
Richard Henderson951c6302014-09-19 11:39:20 -07001911}
1912
1913void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1914{
1915 memop = tcg_canonicalize_memop(memop, 0, 1);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001916 gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
Richard Henderson951c6302014-09-19 11:39:20 -07001917}
1918
1919void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1920{
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001921 if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
Richard Henderson951c6302014-09-19 11:39:20 -07001922 tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
1923 if (memop & MO_SIGN) {
1924 tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
1925 } else {
1926 tcg_gen_movi_i32(TCGV_HIGH(val), 0);
1927 }
1928 return;
1929 }
Richard Henderson951c6302014-09-19 11:39:20 -07001930
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001931 memop = tcg_canonicalize_memop(memop, 1, 0);
1932 gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
Richard Henderson951c6302014-09-19 11:39:20 -07001933}
1934
1935void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1936{
Richard Henderson3a13c3f2014-09-19 11:41:05 -07001937 if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
Richard Henderson951c6302014-09-19 11:39:20 -07001938 tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
1939 return;
1940 }
Richard Henderson951c6302014-09-19 11:39:20 -07001941
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001942 memop = tcg_canonicalize_memop(memop, 1, 1);
1943 gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
Richard Henderson951c6302014-09-19 11:39:20 -07001944}