tcg-ops.h: _i64 TCG immediate instructions cleanup

Move addi_i64, muli_i64 and subi_i64 out of the #if TCG_TARGET_REG_BITS
blocks, as both implementations are strictly identical. Use the same
optimisation (i.e. when imm == 0) for addi_i64 and subi_i64 as for the
32-bit versions.

Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>

git-svn-id: svn://svn.savannah.nongnu.org/qemu/trunk@5598 c046a42c-6fe2-441c-8c8c-71466251a162
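For context, a minimal usage sketch of the effect of the imm == 0 fast path
added below (gen_advance_pc and cpu_pc are hypothetical stand-ins for a
target frontend and its 64-bit program counter global; they are not part of
this patch):

/* Sketch only: a frontend advancing a 64-bit guest PC by an immediate.
 * With this change, a zero displacement collapses to tcg_gen_mov_i64()
 * instead of allocating a constant temp and emitting an add. */
static void gen_advance_pc(TCGv cpu_pc, int64_t disp)
{
    tcg_gen_addi_i64(cpu_pc, cpu_pc, disp);
}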
master
aurel32 2008-11-02 08:22:54 +00:00
parent 0cfe58cd44
commit 6359706f93
1 changed file with 31 additions and 42 deletions


@@ -673,26 +673,12 @@ static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)
                arg1, TCGV_HIGH(arg1), arg2, TCGV_HIGH(arg2));
}

static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_add_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
    tcg_gen_op6(INDEX_op_sub2_i32, ret, TCGV_HIGH(ret),
                arg1, TCGV_HIGH(arg1), arg2, TCGV_HIGH(arg2));
}

static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_sub_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
    tcg_gen_and_i32(ret, arg1, arg2);
@@ -788,13 +774,6 @@ static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)
    tcg_temp_free(t1);
}

static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
    tcg_gen_helper_1_2(tcg_helper_div_i64, ret, arg1, arg2);
@@ -897,25 +876,11 @@ static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)
    tcg_gen_op3(INDEX_op_add_i64, ret, arg1, arg2);
}

static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_add_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
    tcg_gen_op3(INDEX_op_sub_i64, ret, arg1, arg2);
}

static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_sub_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
    tcg_gen_op3(INDEX_op_and_i64, ret, arg1, arg2);
@@ -1011,13 +976,6 @@ static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)
    tcg_gen_op3(INDEX_op_mul_i64, ret, arg1, arg2);
}

static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

#ifdef TCG_TARGET_HAS_div_i64
static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)
{
@@ -1078,6 +1036,18 @@ static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
#endif

static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free(t0);
    }
}

static inline void tcg_gen_brcondi_i64(int cond, TCGv arg1, int64_t arg2,
                                       int label_index)
{
@@ -1086,6 +1056,25 @@ static inline void tcg_gen_brcondi_i64(int cond, TCGv arg1, int64_t arg2,
    tcg_temp_free(t0);
}

static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    TCGv t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free(t0);
}

static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free(t0);
    }
}

/***************************************/
/* optional operations */