mirror of https://github.com/xqemu/xqemu.git
tcg: add ext{8,16,32}u_i{32,64} TCG ops
Currently, zero-extension ops are implemented by an `and` op with a constant. This is then caught in some backends and replaced by a zero-extension instruction. While this works well on RISC machines, it adds a useless register move on non-RISC machines. Example on x86: ext16u_i32 r1, r2 is translated into mov %eax,%ebx movzwl %bx, %ebx while the optimized version should be: movzwl %ax, %ebx This patch adds ext{8,16,32}u_i{32,64} TCG ops that can be implemented in the backends to avoid emitting useless register moves. Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>
This commit is contained in:
parent
3bc0bdcaad
commit
cfc86988a8
24
tcg/tcg-op.h
24
tcg/tcg-op.h
|
@ -1189,16 +1189,22 @@ static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
|
|||
#endif
|
||||
}
|
||||
|
||||
/* These are currently just for convenience.
   We assume a target will recognise these automatically. */
|
||||
/* Zero-extend the low 8 bits of @arg into @ret (i32).
 *
 * When the backend does not advertise a native opcode via
 * TCG_TARGET_HAS_ext8u_i32, fall back to masking with 0xff; otherwise
 * emit the dedicated ext8u_i32 op so the backend can avoid a useless
 * register move (e.g. movzbl on x86).
 */
static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifndef TCG_TARGET_HAS_ext8u_i32
    tcg_gen_andi_i32(ret, arg, 0xffu);
#else
    tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
#endif
}
|
||||
|
||||
/* Zero-extend the low 16 bits of @arg into @ret (i32).
 *
 * Uses the native ext16u_i32 opcode when the backend provides one
 * (TCG_TARGET_HAS_ext16u_i32); otherwise masks with 0xffff, which a
 * backend may still pattern-match into a zero-extension instruction.
 */
static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
#ifndef TCG_TARGET_HAS_ext16u_i32
    tcg_gen_andi_i32(ret, arg, 0xffffu);
#else
    tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
#endif
}
|
||||
|
||||
/* Note: we assume the two high bytes are set to zero */
|
||||
|
@ -1358,17 +1364,29 @@ static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
|
|||
|
||||
/* Zero-extend the low 8 bits of @arg into @ret (i64).
 *
 * Emits the dedicated ext8u_i64 opcode when the backend advertises it
 * (TCG_TARGET_HAS_ext8u_i64); the portable fallback is an and with 0xff.
 */
static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifndef TCG_TARGET_HAS_ext8u_i64
    tcg_gen_andi_i64(ret, arg, 0xffu);
#else
    tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
#endif
}
|
||||
|
||||
/* Zero-extend the low 16 bits of @arg into @ret (i64).
 *
 * Emits the dedicated ext16u_i64 opcode when the backend advertises it
 * (TCG_TARGET_HAS_ext16u_i64); the portable fallback is an and with
 * 0xffff.
 */
static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifndef TCG_TARGET_HAS_ext16u_i64
    tcg_gen_andi_i64(ret, arg, 0xffffu);
#else
    tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
#endif
}
|
||||
|
||||
/* Zero-extend the low 32 bits of @arg into @ret (i64).
 *
 * Emits the dedicated ext32u_i64 opcode when the backend advertises it
 * (TCG_TARGET_HAS_ext32u_i64); the portable fallback masks with
 * 0xffffffff.
 */
static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#ifndef TCG_TARGET_HAS_ext32u_i64
    tcg_gen_andi_i64(ret, arg, 0xffffffffu);
#else
    tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
#endif
}
|
||||
|
||||
/* Note: we assume the target supports move between 32 and 64 bit
|
||||
|
@ -1382,7 +1400,7 @@ static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
|
|||
registers */
|
||||
/* Zero-extend a 32-bit value into a 64-bit destination.
 *
 * The i32 source is reinterpreted as an i64 register (the surrounding
 * comment notes the target supports moves between 32- and 64-bit
 * registers) and then 32-bit zero-extended via tcg_gen_ext32u_i64,
 * which picks the native ext32u opcode when the backend has one
 * instead of always emitting an and with 0xffffffff.
 */
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}
|
||||
|
||||
/* Note: we assume the target supports move between 32 and 64 bit
|
||||
|
|
|
@ -89,6 +89,12 @@ DEF2(ext8s_i32, 1, 1, 0, 0)
|
|||
/* Optional 32-bit unary opcodes: each entry is registered only when the
   target backend defines the matching TCG_TARGET_HAS_* macro; otherwise
   tcg-op.h expands the operation to a generic sequence (and/shift).
   NOTE(review): DEF2 arguments are presumably
   (name, nb_out_args, nb_in_args, nb_const_args, flags) — confirm
   against the DEF2 definition in tcg.h.  Entry order fixes the opcode
   enum values, so it must not be changed. */
#ifdef TCG_TARGET_HAS_ext16s_i32
DEF2(ext16s_i32, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_ext8u_i32
DEF2(ext8u_i32, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_ext16u_i32
DEF2(ext16u_i32, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_bswap16_i32
DEF2(bswap16_i32, 1, 1, 0, 0)
#endif
|
||||
|
@ -152,6 +158,15 @@ DEF2(ext16s_i64, 1, 1, 0, 0)
|
|||
/* Optional 64-bit unary opcodes, mirroring the 32-bit list above: each
   is registered only when the backend defines the corresponding
   TCG_TARGET_HAS_* macro.
   NOTE(review): DEF2 arguments are presumably
   (name, nb_out_args, nb_in_args, nb_const_args, flags) — confirm
   against the DEF2 definition in tcg.h.  Entry order fixes the opcode
   enum values, so it must not be changed. */
#ifdef TCG_TARGET_HAS_ext32s_i64
DEF2(ext32s_i64, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_ext8u_i64
DEF2(ext8u_i64, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_ext16u_i64
DEF2(ext16u_i64, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_ext32u_i64
DEF2(ext32u_i64, 1, 1, 0, 0)
#endif
#ifdef TCG_TARGET_HAS_bswap16_i64
DEF2(bswap16_i64, 1, 1, 0, 0)
#endif
|
||||
|
|
Loading…
Reference in New Issue