mirror of https://github.com/xemu-project/xemu.git
tcg: convert tb->cflags reads to tb_cflags(tb)
Convert all existing readers of tb->cflags to tb_cflags, so that we
use atomic_read and therefore avoid undefined behaviour in C11.

Note that the remaining setters/getters of the field are protected
by tb_lock, and therefore do not need conversion.

Luckily all readers access the field via 'tb->cflags' (so no foo.cflags,
bar->cflags in the code base), which makes the conversion easily
scriptable:

FILES=$(git grep 'tb->cflags' target include/exec/gen-icount.h \
        accel/tcg/translator.c | cut -f1 -d':' | sort | uniq)
perl -pi -e 's/([^.>])tb->cflags/$1tb_cflags(tb)/g' $FILES
perl -pi -e 's/([a-z->.]*)(->|\.)tb->cflags/tb_cflags($1$2tb)/g' $FILES

Then manually fixed the few errors that checkpatch reported.

Compile-tested for all targets.

Suggested-by: Richard Henderson <rth@twiddle.net>
Reviewed-by: Richard Henderson <rth@twiddle.net>
Signed-off-by: Emilio G. Cota <cota@braap.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
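For reference, the accessor that readers are switched to is essentially a one-line
atomic wrapper around the field. The snippet below is an illustrative sketch only,
not QEMU's actual header: the TranslationBlock stub is hypothetical and trimmed to
the one field that matters, and QEMU's own atomic_read() macro is approximated here
with the equivalent GCC/clang builtin.

    #include <stdint.h>

    /* Hypothetical, trimmed stand-in for the real TranslationBlock.
     * Writers still update cflags while holding tb_lock. */
    typedef struct TranslationBlock {
        uint32_t cflags;
    } TranslationBlock;

    /* Readers use a relaxed atomic load instead of a plain load, so a
     * concurrent write no longer constitutes a C11 data race. */
    static inline uint32_t tb_cflags(const TranslationBlock *tb)
    {
        return __atomic_load_n(&tb->cflags, __ATOMIC_RELAXED);
    }

    /* Call sites change mechanically, e.g.:
     *   before: max_insns = tb->cflags & CF_COUNT_MASK;
     *   after:  max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     */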
This commit is contained in:
parent cdfef1715c
commit c5a49c63fa
@@ -45,7 +45,7 @@ void translator_loop(const TranslatorOps *ops, DisasContextBase *db,
     db->singlestep_enabled = cpu->singlestep_enabled;
 
     /* Instruction counting */
-    max_insns = db->tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(db->tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -95,7 +95,7 @@ void translator_loop(const TranslatorOps *ops, DisasContextBase *db,
            update db->pc_next and db->is_jmp to indicate what should be
            done next -- either exiting this loop or locate the start of
            the next instruction. */
-        if (db->num_insns == max_insns && (db->tb->cflags & CF_LAST_IO)) {
+        if (db->num_insns == max_insns && (tb_cflags(db->tb) & CF_LAST_IO)) {
             /* Accept I/O on the last instruction. */
             gen_io_start();
             ops->translate_insn(db, cpu);
@@ -13,7 +13,7 @@ static inline void gen_tb_start(TranslationBlock *tb)
     TCGv_i32 count, imm;
 
     exitreq_label = gen_new_label();
-    if (tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(tb) & CF_USE_ICOUNT) {
         count = tcg_temp_local_new_i32();
     } else {
         count = tcg_temp_new_i32();
@@ -22,7 +22,7 @@ static inline void gen_tb_start(TranslationBlock *tb)
     tcg_gen_ld_i32(count, tcg_ctx.tcg_env,
                    -ENV_OFFSET + offsetof(CPUState, icount_decr.u32));
 
-    if (tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(tb) & CF_USE_ICOUNT) {
         imm = tcg_temp_new_i32();
         /* We emit a movi with a dummy immediate argument. Keep the insn index
          * of the movi so that we later (when we know the actual insn count)
@@ -36,7 +36,7 @@ static inline void gen_tb_start(TranslationBlock *tb)
 
     tcg_gen_brcondi_i32(TCG_COND_LT, count, 0, exitreq_label);
 
-    if (tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(tb) & CF_USE_ICOUNT) {
         tcg_gen_st16_i32(count, tcg_ctx.tcg_env,
                          -ENV_OFFSET + offsetof(CPUState, icount_decr.u16.low));
     }
@@ -46,7 +46,7 @@ static inline void gen_tb_start(TranslationBlock *tb)
 
 static inline void gen_tb_end(TranslationBlock *tb, int num_insns)
 {
-    if (tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(tb) & CF_USE_ICOUNT) {
         /* Update the num_insn immediate parameter now that we know
          * the actual insn count. */
         tcg_set_insn_param(icount_start_insn_idx, 1, num_insns);
@@ -455,7 +455,7 @@ static bool in_superpage(DisasContext *ctx, int64_t addr)
 
 static bool use_exit_tb(DisasContext *ctx)
 {
-    return ((ctx->base.tb->cflags & CF_LAST_IO)
+    return ((tb_cflags(ctx->base.tb) & CF_LAST_IO)
             || ctx->base.singlestep_enabled
             || singlestep);
 }
@@ -2399,7 +2399,7 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
         case 0xC000:
             /* RPCC */
             va = dest_gpr(ctx, ra);
-            if (ctx->base.tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
                 gen_io_start();
                 gen_helper_load_pcc(va, cpu_env);
                 gen_io_end();
@@ -348,7 +348,8 @@ static inline bool use_goto_tb(DisasContext *s, int n, uint64_t dest)
     /* No direct tb linking with singlestep (either QEMU's or the ARM
      * debug architecture kind) or deterministic io
      */
-    if (s->base.singlestep_enabled || s->ss_active || (s->base.tb->cflags & CF_LAST_IO)) {
+    if (s->base.singlestep_enabled || s->ss_active ||
+        (tb_cflags(s->base.tb) & CF_LAST_IO)) {
         return false;
     }
 
@@ -1561,7 +1562,7 @@ static void handle_sys(DisasContext *s, uint32_t insn, bool isread,
         break;
     }
 
-    if ((s->base.tb->cflags & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
+    if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
         gen_io_start();
     }
 
@@ -1592,7 +1593,7 @@ static void handle_sys(DisasContext *s, uint32_t insn, bool isread,
         }
     }
 
-    if ((s->base.tb->cflags & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
+    if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
         /* I/O operations must end the TB here (whether read or write) */
         gen_io_end();
         s->base.is_jmp = DISAS_UPDATE;
@@ -7704,7 +7704,7 @@ static int disas_coproc_insn(DisasContext *s, uint32_t insn)
         break;
     }
 
-    if ((s->base.tb->cflags & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
+    if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
         gen_io_start();
     }
 
@@ -7795,7 +7795,7 @@ static int disas_coproc_insn(DisasContext *s, uint32_t insn)
         }
     }
 
-    if ((s->base.tb->cflags & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
+    if ((tb_cflags(s->base.tb) & CF_USE_ICOUNT) && (ri->type & ARM_CP_IO)) {
         /* I/O operations must end the TB here (whether read or write) */
         gen_io_end();
         gen_lookup_tb(s);
@@ -12253,7 +12253,7 @@ static void arm_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
 {
     DisasContext *dc = container_of(dcbase, DisasContext, base);
 
-    if (dc->base.tb->cflags & CF_LAST_IO && dc->condjmp) {
+    if (tb_cflags(dc->base.tb) & CF_LAST_IO && dc->condjmp) {
         /* FIXME: This can theoretically happen with self-modifying code. */
         cpu_abort(cpu, "IO on conditional branch instruction");
     }
@@ -3141,7 +3141,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
 
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -3171,7 +3171,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         /* Pretty disas. */
         LOG_DIS("%8.8x:\t", dc->pc);
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
            gen_io_start();
        }
         dc->clear_x = 1;
@@ -3244,7 +3244,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
 
     npc = dc->pc;
 
-    if (tb->cflags & CF_LAST_IO)
+    if (tb_cflags(tb) & CF_LAST_IO)
         gen_io_end();
     /* Force an update if the per-tb cpu state has changed. */
     if (dc->is_jmp == DISAS_NEXT
@@ -469,7 +469,7 @@ static DisasJumpType gen_illegal(DisasContext *ctx)
 static bool use_goto_tb(DisasContext *ctx, target_ulong dest)
 {
     /* Suppress goto_tb in the case of single-steping and IO. */
-    if ((ctx->base.tb->cflags & CF_LAST_IO) || ctx->base.singlestep_enabled) {
+    if ((tb_cflags(ctx->base.tb) & CF_LAST_IO) || ctx->base.singlestep_enabled) {
         return false;
     }
     return true;
@@ -1118,7 +1118,7 @@ static void gen_bpt_io(DisasContext *s, TCGv_i32 t_port, int ot)
 
 static inline void gen_ins(DisasContext *s, TCGMemOp ot)
 {
-    if (s->base.tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_string_movl_A0_EDI(s);
@@ -1133,14 +1133,14 @@ static inline void gen_ins(DisasContext *s, TCGMemOp ot)
     gen_op_movl_T0_Dshift(ot);
     gen_op_add_reg_T0(s->aflag, R_EDI);
     gen_bpt_io(s, cpu_tmp2_i32, ot);
-    if (s->base.tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
         gen_io_end();
     }
 }
 
 static inline void gen_outs(DisasContext *s, TCGMemOp ot)
 {
-    if (s->base.tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_string_movl_A0_ESI(s);
@@ -1153,7 +1153,7 @@ static inline void gen_outs(DisasContext *s, TCGMemOp ot)
     gen_op_movl_T0_Dshift(ot);
     gen_op_add_reg_T0(s->aflag, R_ESI);
     gen_bpt_io(s, cpu_tmp2_i32, ot);
-    if (s->base.tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
         gen_io_end();
     }
 }
@@ -6340,7 +6340,7 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
             gen_ins(s, ot);
-            if (s->base.tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
                 gen_jmp(s, s->pc - s->cs_base);
             }
         }
@@ -6355,7 +6355,7 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
             gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
         } else {
             gen_outs(s, ot);
-            if (s->base.tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
                 gen_jmp(s, s->pc - s->cs_base);
             }
         }
@ -6371,14 +6371,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
tcg_gen_movi_tl(cpu_T0, val);
|
||||
gen_check_io(s, ot, pc_start - s->cs_base,
|
||||
SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
tcg_gen_movi_i32(cpu_tmp2_i32, val);
|
||||
gen_helper_in_func(ot, cpu_T1, cpu_tmp2_i32);
|
||||
gen_op_mov_reg_v(ot, R_EAX, cpu_T1);
|
||||
gen_bpt_io(s, cpu_tmp2_i32, ot);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -6392,14 +6392,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
svm_is_rep(prefixes));
|
||||
gen_op_mov_v_reg(ot, cpu_T1, R_EAX);
|
||||
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
tcg_gen_movi_i32(cpu_tmp2_i32, val);
|
||||
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T1);
|
||||
gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
|
||||
gen_bpt_io(s, cpu_tmp2_i32, ot);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -6410,14 +6410,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
tcg_gen_ext16u_tl(cpu_T0, cpu_regs[R_EDX]);
|
||||
gen_check_io(s, ot, pc_start - s->cs_base,
|
||||
SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T0);
|
||||
gen_helper_in_func(ot, cpu_T1, cpu_tmp2_i32);
|
||||
gen_op_mov_reg_v(ot, R_EAX, cpu_T1);
|
||||
gen_bpt_io(s, cpu_tmp2_i32, ot);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -6430,14 +6430,14 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
svm_is_rep(prefixes));
|
||||
gen_op_mov_v_reg(ot, cpu_T1, R_EAX);
|
||||
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T0);
|
||||
tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T1);
|
||||
gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
|
||||
gen_bpt_io(s, cpu_tmp2_i32, ot);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -7143,11 +7143,11 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
case 0x131: /* rdtsc */
|
||||
gen_update_cc_op(s);
|
||||
gen_jmp_im(pc_start - s->cs_base);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
gen_helper_rdtsc(cpu_env);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -7602,11 +7602,11 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
}
|
||||
gen_update_cc_op(s);
|
||||
gen_jmp_im(pc_start - s->cs_base);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
gen_helper_rdtscp(cpu_env);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
gen_jmp(s, s->pc - s->cs_base);
|
||||
}
|
||||
|
@ -7971,24 +7971,24 @@ static target_ulong disas_insn(DisasContext *s, CPUState *cpu)
|
|||
gen_update_cc_op(s);
|
||||
gen_jmp_im(pc_start - s->cs_base);
|
||||
if (b & 2) {
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
gen_op_mov_v_reg(ot, cpu_T0, rm);
|
||||
gen_helper_write_crN(cpu_env, tcg_const_i32(reg),
|
||||
cpu_T0);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
}
|
||||
gen_jmp_im(s->pc - s->cs_base);
|
||||
gen_eob(s);
|
||||
} else {
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_start();
|
||||
}
|
||||
gen_helper_read_crN(cpu_T0, cpu_env, tcg_const_i32(reg));
|
||||
gen_op_mov_reg_v(ot, rm, cpu_T0);
|
||||
if (s->base.tb->cflags & CF_USE_ICOUNT) {
|
||||
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
|
||||
gen_io_end();
|
||||
}
|
||||
}
|
||||
|
@@ -8452,7 +8452,7 @@ static int i386_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu,
        record/replay modes and there will always be an
        additional step for ecx=0 when icount is enabled.
      */
-    dc->repz_opt = !dc->jmp_opt && !(dc->base.tb->cflags & CF_USE_ICOUNT);
+    dc->repz_opt = !dc->jmp_opt && !(tb_cflags(dc->base.tb) & CF_USE_ICOUNT);
 #if 0
     /* check addseg logic */
     if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
@@ -8518,7 +8518,7 @@ static void i386_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
            the flag and abort the translation to give the irqs a
            chance to happen */
         dc->base.is_jmp = DISAS_TOO_MANY;
-    } else if ((dc->base.tb->cflags & CF_USE_ICOUNT)
+    } else if ((tb_cflags(dc->base.tb) & CF_USE_ICOUNT)
                && ((dc->base.pc_next & TARGET_PAGE_MASK)
                    != ((dc->base.pc_next + TARGET_MAX_INSN_SIZE - 1)
                        & TARGET_PAGE_MASK)
@@ -880,24 +880,24 @@ static void dec_wcsr(DisasContext *dc)
         break;
     case CSR_IM:
         /* mark as an io operation because it could cause an interrupt */
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
         gen_helper_wcsr_im(cpu_env, cpu_R[dc->r1]);
         tcg_gen_movi_tl(cpu_pc, dc->pc + 4);
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
         dc->is_jmp = DISAS_UPDATE;
         break;
     case CSR_IP:
         /* mark as an io operation because it could cause an interrupt */
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
         gen_helper_wcsr_ip(cpu_env, cpu_R[dc->r1]);
         tcg_gen_movi_tl(cpu_pc, dc->pc + 4);
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_end();
        }
         dc->is_jmp = DISAS_UPDATE;
@@ -1078,7 +1078,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
 
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -1106,7 +1106,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         /* Pretty disas. */
         LOG_DIS("%8.8x:\t", dc->pc);
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -1119,7 +1119,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
          && (dc->pc < next_page_start)
          && num_insns < max_insns);
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 
@@ -5547,7 +5547,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
     dc->done_mac = 0;
     dc->writeback_mask = 0;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -5573,7 +5573,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             break;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -5585,7 +5585,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
              (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
              num_insns < max_insns);
 
-    if (tb->cflags & CF_LAST_IO)
+    if (tb_cflags(tb) & CF_LAST_IO)
         gen_io_end();
     if (unlikely(cs->singlestep_enabled)) {
         /* Make sure the pc is updated, and raise a debug exception. */
@@ -1666,7 +1666,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
 
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -1701,7 +1701,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         /* Pretty disas. */
         LOG_DIS("%8.8x:\t", dc->pc);
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -1763,7 +1763,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             npc = dc->jmp_pc;
         }
 
-    if (tb->cflags & CF_LAST_IO)
+    if (tb_cflags(tb) & CF_LAST_IO)
         gen_io_end();
     /* Force an update if the per-tb cpu state has changed. */
     if (dc->is_jmp == DISAS_NEXT
@@ -5327,11 +5327,11 @@ static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
         switch (sel) {
         case 0:
             /* Mark as an IO operation because we read the time. */
-            if (ctx->tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
                 gen_io_start();
             }
             gen_helper_mfc0_count(arg, cpu_env);
-            if (ctx->tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
                 gen_io_end();
             }
             /* Break the TB to be able to take timer interrupts immediately
@@ -5734,7 +5734,7 @@ static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
     if (sel != 0)
         check_insn(ctx, ISA_MIPS32);
 
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
 
@@ -6401,7 +6401,7 @@ static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
     trace_mips_translate_c0("mtc0", rn, reg, sel);
 
     /* For simplicity assume that all writes can cause interrupts. */
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         /* BS_STOP isn't sufficient, we need to ensure we break out of
          * translated code to check for pending interrupts. */
@@ -6679,11 +6679,11 @@ static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
         switch (sel) {
         case 0:
             /* Mark as an IO operation because we read the time. */
-            if (ctx->tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
                 gen_io_start();
             }
             gen_helper_mfc0_count(arg, cpu_env);
-            if (ctx->tb->cflags & CF_USE_ICOUNT) {
+            if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
                 gen_io_end();
             }
             /* Break the TB to be able to take timer interrupts immediately
@@ -7072,7 +7072,7 @@ static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
     if (sel != 0)
         check_insn(ctx, ISA_MIPS64);
 
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
 
@@ -7727,7 +7727,7 @@ static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
     trace_mips_translate_c0("dmtc0", rn, reg, sel);
 
     /* For simplicity assume that all writes can cause interrupts. */
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         /* BS_STOP isn't sufficient, we need to ensure we break out of
          * translated code to check for pending interrupts. */
@@ -10756,11 +10756,11 @@ static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
         gen_store_gpr(t0, rt);
         break;
     case 2:
-        if (ctx->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
             gen_io_start();
         }
         gen_helper_rdhwr_cc(t0, cpu_env);
-        if (ctx->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
             gen_io_end();
         }
         gen_store_gpr(t0, rt);
@@ -20248,7 +20248,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -20274,7 +20274,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             goto done_generating;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -20335,7 +20335,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         if (singlestep)
             break;
     }
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
     if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
@@ -832,7 +832,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     ctx.singlestep_enabled = 0;
     ctx.bstate = BS_NONE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -827,7 +827,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
         max_insns = 1;
     } else {
         int page_insns = (TARGET_PAGE_SIZE - (tb->pc & TARGET_PAGE_MASK)) / 4;
-        max_insns = tb->cflags & CF_COUNT_MASK;
+        max_insns = tb_cflags(tb) & CF_COUNT_MASK;
         if (max_insns == 0) {
             max_insns = CF_COUNT_MASK;
         }
@@ -854,7 +854,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             break;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -871,7 +871,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             !tcg_op_buf_full() &&
             num_insns < max_insns);
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 
@@ -1546,7 +1546,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
 
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
 
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
@@ -1589,7 +1589,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             break;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
         disas_openrisc_insn(dc, cpu);
@@ -1612,7 +1612,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
          && (dc->pc < next_page_start)
          && num_insns < max_insns);
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 
@@ -7273,7 +7273,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     msr_se = 1;
 #endif
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -7301,7 +7301,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         LOG_DISAS("----------------\n");
         LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                   ctx.nip, ctx.mem_idx, (int)msr_ir);
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO))
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO))
             gen_io_start();
         if (unlikely(need_byteswap(&ctx))) {
             ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
@@ -7382,7 +7382,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             exit(1);
         }
     }
-    if (tb->cflags & CF_LAST_IO)
+    if (tb_cflags(tb) & CF_LAST_IO)
         gen_io_end();
     if (ctx.exception == POWERPC_EXCP_NONE) {
         gen_goto_tb(&ctx, 0, ctx.nip);
@@ -176,11 +176,11 @@ static void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
 #if !defined(CONFIG_USER_ONLY)
 static void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -188,11 +188,11 @@ static void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
 
 static void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -203,11 +203,11 @@ static void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
 /* Time base */
 static void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -215,11 +215,11 @@ static void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
 
 static void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -240,11 +240,11 @@ static void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
 #if !defined(CONFIG_USER_ONLY)
 static void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -252,11 +252,11 @@ static void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
 
 static void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -284,11 +284,11 @@ static void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
 /* HDECR */
 static void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -296,11 +296,11 @@ static void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
 
 static void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
 {
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
-    if (ctx->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(ctx->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_stop_exception(ctx);
     }
@@ -554,7 +554,7 @@ static void gen_op_calc_cc(DisasContext *s)
 static bool use_exit_tb(DisasContext *s)
 {
     return (s->singlestep_enabled ||
-            (s->tb->cflags & CF_LAST_IO) ||
+            (tb_cflags(s->tb) & CF_LAST_IO) ||
             (s->tb->flags & FLAG_MASK_PER));
 }
 
@@ -5883,7 +5883,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
 
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -5908,7 +5908,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             break;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -5927,7 +5927,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         }
     } while (status == NO_EXIT);
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 
@@ -2248,7 +2248,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
                   (ctx.tbflags & (1 << SR_RB))) * 0x10;
     ctx.fbank = ctx.tbflags & FPSCR_FR ? 0x10 : 0;
 
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -2292,7 +2292,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
             break;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -2300,7 +2300,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
         decode_opc(&ctx);
         ctx.pc += 2;
     }
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 
@@ -5767,7 +5767,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock * tb)
 #endif
 
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -5796,7 +5796,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock * tb)
             goto exit_gen_loop;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -5823,7 +5823,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock * tb)
              num_insns < max_insns);
 
  exit_gen_loop:
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
     if (!dc->is_br) {
@@ -2378,7 +2378,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     uint64_t pc_start = tb->pc;
     uint64_t next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     int num_insns = 0;
-    int max_insns = tb->cflags & CF_COUNT_MASK;
+    int max_insns = tb_cflags(tb) & CF_COUNT_MASK;
 
     dc->pc = pc_start;
     dc->mmuidx = 0;
@@ -8790,7 +8790,7 @@ void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
     int num_insns, max_insns;
 
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -1900,7 +1900,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
     cpu_F1d = tcg_temp_new_i64();
     next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
     num_insns = 0;
-    max_insns = tb->cflags & CF_COUNT_MASK;
+    max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     if (max_insns == 0) {
         max_insns = CF_COUNT_MASK;
     }
@@ -1933,7 +1933,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             goto done_generating;
         }
 
-        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -1958,7 +1958,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             dc->pc < next_page_start &&
             num_insns < max_insns);
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         if (dc->condjmp) {
             /* FIXME: This can theoretically happen with self-modifying
                code. */
@@ -517,12 +517,12 @@ static bool gen_check_sr(DisasContext *dc, uint32_t sr, unsigned access)
 
 static bool gen_rsr_ccount(DisasContext *dc, TCGv_i32 d, uint32_t sr)
 {
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_update_ccount(cpu_env);
     tcg_gen_mov_i32(d, cpu_SR[sr]);
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         return true;
     }
@@ -702,11 +702,11 @@ static bool gen_wsr_cpenable(DisasContext *dc, uint32_t sr, TCGv_i32 v)
 
 static void gen_check_interrupts(DisasContext *dc)
 {
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_check_interrupts(cpu_env);
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_end();
     }
 }
@@ -760,11 +760,11 @@ static bool gen_wsr_ps(DisasContext *dc, uint32_t sr, TCGv_i32 v)
 
 static bool gen_wsr_ccount(DisasContext *dc, uint32_t sr, TCGv_i32 v)
 {
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_wsr_ccount(cpu_env, v);
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_end();
         gen_jumpi_check_loop_end(dc, 0);
         return true;
@@ -801,11 +801,11 @@ static bool gen_wsr_ccompare(DisasContext *dc, uint32_t sr, TCGv_i32 v)
 
         tcg_gen_mov_i32(cpu_SR[sr], v);
         tcg_gen_andi_i32(cpu_SR[INTSET], cpu_SR[INTSET], ~int_bit);
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_start();
        }
         gen_helper_update_ccompare(cpu_env, tmp);
-        if (dc->tb->cflags & CF_USE_ICOUNT) {
+        if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
            gen_io_end();
            gen_jumpi_check_loop_end(dc, 0);
            ret = true;
@@ -900,11 +900,11 @@ static void gen_waiti(DisasContext *dc, uint32_t imm4)
     TCGv_i32 pc = tcg_const_i32(dc->next_pc);
     TCGv_i32 intlevel = tcg_const_i32(imm4);
 
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_start();
     }
     gen_helper_waiti(cpu_env, pc, intlevel);
-    if (dc->tb->cflags & CF_USE_ICOUNT) {
+    if (tb_cflags(dc->tb) & CF_USE_ICOUNT) {
         gen_io_end();
     }
     tcg_temp_free(pc);
@@ -3126,7 +3126,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
     CPUXtensaState *env = cs->env_ptr;
     DisasContext dc;
     int insn_count = 0;
-    int max_insns = tb->cflags & CF_COUNT_MASK;
+    int max_insns = tb_cflags(tb) & CF_COUNT_MASK;
     uint32_t pc_start = tb->pc;
     uint32_t next_page_start =
         (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
@@ -3162,7 +3162,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
 
     gen_tb_start(tb);
 
-    if ((tb->cflags & CF_USE_ICOUNT) &&
+    if ((tb_cflags(tb) & CF_USE_ICOUNT) &&
         (tb->flags & XTENSA_TBFLAG_YIELD)) {
         tcg_gen_insn_start(dc.pc);
         ++insn_count;
@@ -3194,7 +3194,7 @@ void gen_intermediate_code(CPUState *cs, TranslationBlock *tb)
             break;
         }
 
-        if (insn_count == max_insns && (tb->cflags & CF_LAST_IO)) {
+        if (insn_count == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
             gen_io_start();
         }
 
@@ -3235,7 +3235,7 @@ done:
         tcg_temp_free(dc.next_icount);
     }
 
-    if (tb->cflags & CF_LAST_IO) {
+    if (tb_cflags(tb) & CF_LAST_IO) {
         gen_io_end();
     }
 