tcg/loongarch64: Support TCG_TYPE_V64
We can implement this with fld_d and fst_d for load and store,
and then use the normal v128 operations in registers.
This improves support for guests that use 64-bit (v64) vectors.
Reviewed-by: Song Gao <gaosong@loongson.cn>
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
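---

For reference, a minimal sketch (not part of the patch) of the load path that
TCG_TYPE_V64 now shares with TCG_TYPE_I64; the store path is symmetric with
st_d/fst_d.  The function name is hypothetical, and OPC_FLD_D is assumed to be
the fld.d encoding already defined in tcg-target.c.inc:

static void tcg_out_ld_v64_sketch(TCGContext *s, TCGReg dest,
                                  TCGReg base, intptr_t offset)
{
    if (dest < TCG_REG_V0) {
        /* Destination is a general register: plain 64-bit load. */
        tcg_out_ldst(s, OPC_LD_D, dest, base, offset);
    } else {
        /*
         * Destination is an LSX register: fld.d fills its low 64 bits,
         * which is all a V64 value occupies, so arithmetic on the value
         * can then reuse the existing 128-bit vector code paths unchanged.
         */
        tcg_out_ldst(s, OPC_FLD_D, dest, base, offset);
    }
}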
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index de53695..980ea10 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -321,6 +321,7 @@
             }
         }
         break;
+    case TCG_TYPE_V64:
     case TCG_TYPE_V128:
         tcg_out_opc_vori_b(s, ret, arg, 0);
         break;
@@ -838,6 +839,7 @@
         }
         break;
     case TCG_TYPE_I64:
+    case TCG_TYPE_V64:
         if (dest < TCG_REG_V0) {
             tcg_out_ldst(s, OPC_LD_D, dest, base, offset);
         } else {
@@ -869,6 +871,7 @@
         }
         break;
     case TCG_TYPE_I64:
+    case TCG_TYPE_V64:
         if (src < TCG_REG_V0) {
             tcg_out_ldst(s, OPC_ST_D, src, base, offset);
         } else {
@@ -1880,8 +1883,8 @@
     a2 = args[2];
     a3 = args[3];
 
-    /* Currently only supports V128 */
-    tcg_debug_assert(type == TCG_TYPE_V128);
+    /* Currently only supports V64 & V128 */
+    tcg_debug_assert(type == TCG_TYPE_V64 || type == TCG_TYPE_V128);
 
     switch (opc) {
     case INDEX_op_st_vec:
@@ -2394,6 +2397,7 @@
     tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S9);
 
     if (cpuinfo & CPUINFO_LSX) {
+        tcg_target_available_regs[TCG_TYPE_V64] = ALL_VECTOR_REGS;
         tcg_target_available_regs[TCG_TYPE_V128] = ALL_VECTOR_REGS;
         tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_V24);
         tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_V25);
diff --git a/tcg/loongarch64/tcg-target.h b/tcg/loongarch64/tcg-target.h
index 29e4860..990bad1 100644
--- a/tcg/loongarch64/tcg-target.h
+++ b/tcg/loongarch64/tcg-target.h
@@ -171,7 +171,7 @@
 #define TCG_TARGET_HAS_tst 0
-#define TCG_TARGET_HAS_v64 0
+#define TCG_TARGET_HAS_v64 (cpuinfo & CPUINFO_LSX)
 #define TCG_TARGET_HAS_v128 (cpuinfo & CPUINFO_LSX)
 #define TCG_TARGET_HAS_v256 0