accel/tcg: Add cpu_{ld,st}*_mmu interfaces

These functions are much closer to the softmmu helper
functions, in that they take the complete MemOpIdx,
and from that they may enforce required alignment.

The previous cpu_ldst.h functions did not have alignment info,
and so did not enforce it.  Retain that behaviour by adding MO_UNALN
to the MemOp that we create when calling the new functions.

Note that we are not yet enforcing alignment for user-only,
but we now have the information with which to do so.

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson 2021-07-27 07:48:55 -10:00
parent f79e80899d
commit f83bcecb1f
5 changed files with 717 additions and 664 deletions
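
To make the difference concrete, here is a minimal sketch of typical caller
code (illustration only, not part of this patch; the use of MO_ALIGN is an
assumed example of an alignment requirement a target helper might want
enforced):

    /* Minimal sketch, assuming a softmmu target helper. */
    #include "exec/cpu_ldst.h"   /* cpu_ldl_be_mmu(), cpu_ldl_be_mmuidx_ra() */
    #include "exec/memop.h"      /* MemOp flags: MO_BEUL, MO_ALIGN, MO_UNALN */

    static uint32_t load32_checked(CPUArchState *env, target_ulong addr,
                                   int mmu_idx, uintptr_t retaddr)
    {
        /* Alignment is carried in the MemOp: with MO_ALIGN, a misaligned
         * address takes the target's alignment fault instead of succeeding. */
        MemOpIdx oi = make_memop_idx(MO_BEUL | MO_ALIGN, mmu_idx);
        return cpu_ldl_be_mmu(env, addr, oi, retaddr);
    }

    /* The older entry point cannot express alignment; it is now a wrapper
     * that builds MO_BEUL | MO_UNALN itself:
     *
     *     cpu_ldl_be_mmuidx_ra(env, addr, mmu_idx, retaddr);
     */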

accel/tcg/cputlb.c

@@ -1839,6 +1839,25 @@ static void *atomic_mmu_lookup(CPUArchState *env, target_ulong addr,
     cpu_loop_exit_atomic(env_cpu(env), retaddr);
 }
 
+/*
+ * Verify that we have passed the correct MemOp to the correct function.
+ *
+ * In the case of the helper_*_mmu functions, we will have done this by
+ * using the MemOp to look up the helper during code generation.
+ *
+ * In the case of the cpu_*_mmu functions, this is up to the caller.
+ * We could present one function to target code, and dispatch based on
+ * the MemOp, but so far we have worked hard to avoid an indirect function
+ * call along the memory path.
+ */
+static void validate_memop(MemOpIdx oi, MemOp expected)
+{
+#ifdef CONFIG_DEBUG_TCG
+    MemOp have = get_memop(oi) & (MO_SIZE | MO_BSWAP);
+    assert(have == expected);
+#endif
+}
+
 /*
  * Load Helpers
  *
@@ -1992,6 +2011,7 @@ load_helper(CPUArchState *env, target_ulong addr, MemOpIdx oi,
 
 static uint64_t full_ldub_mmu(CPUArchState *env, target_ulong addr,
                               MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_UB);
     return load_helper(env, addr, oi, retaddr, MO_UB, false, full_ldub_mmu);
 }
@@ -2004,6 +2024,7 @@ tcg_target_ulong helper_ret_ldub_mmu(CPUArchState *env, target_ulong addr,
 static uint64_t full_le_lduw_mmu(CPUArchState *env, target_ulong addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_LEUW);
     return load_helper(env, addr, oi, retaddr, MO_LEUW, false,
                        full_le_lduw_mmu);
 }
@@ -2017,6 +2038,7 @@ tcg_target_ulong helper_le_lduw_mmu(CPUArchState *env, target_ulong addr,
 static uint64_t full_be_lduw_mmu(CPUArchState *env, target_ulong addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_BEUW);
     return load_helper(env, addr, oi, retaddr, MO_BEUW, false,
                        full_be_lduw_mmu);
 }
@@ -2030,6 +2052,7 @@ tcg_target_ulong helper_be_lduw_mmu(CPUArchState *env, target_ulong addr,
 static uint64_t full_le_ldul_mmu(CPUArchState *env, target_ulong addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_LEUL);
     return load_helper(env, addr, oi, retaddr, MO_LEUL, false,
                        full_le_ldul_mmu);
 }
@@ -2043,6 +2066,7 @@ tcg_target_ulong helper_le_ldul_mmu(CPUArchState *env, target_ulong addr,
 static uint64_t full_be_ldul_mmu(CPUArchState *env, target_ulong addr,
                                  MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_BEUL);
     return load_helper(env, addr, oi, retaddr, MO_BEUL, false,
                        full_be_ldul_mmu);
 }
@@ -2056,6 +2080,7 @@ tcg_target_ulong helper_be_ldul_mmu(CPUArchState *env, target_ulong addr,
 uint64_t helper_le_ldq_mmu(CPUArchState *env, target_ulong addr,
                            MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_LEQ);
     return load_helper(env, addr, oi, retaddr, MO_LEQ, false,
                        helper_le_ldq_mmu);
 }
@@ -2063,6 +2088,7 @@ uint64_t helper_le_ldq_mmu(CPUArchState *env, target_ulong addr,
 uint64_t helper_be_ldq_mmu(CPUArchState *env, target_ulong addr,
                            MemOpIdx oi, uintptr_t retaddr)
 {
+    validate_memop(oi, MO_BEQ);
     return load_helper(env, addr, oi, retaddr, MO_BEQ, false,
                        helper_be_ldq_mmu);
 }
@@ -2108,186 +2134,56 @@ tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
  */
 
 static inline uint64_t cpu_load_helper(CPUArchState *env, abi_ptr addr,
-                                       int mmu_idx, uintptr_t retaddr,
-                                       MemOp op, FullLoadHelper *full_load)
+                                       MemOpIdx oi, uintptr_t retaddr,
+                                       FullLoadHelper *full_load)
 {
-    MemOpIdx oi = make_memop_idx(op, mmu_idx);
     uint64_t ret;
 
     trace_guest_ld_before_exec(env_cpu(env), addr, oi);
     ret = full_load(env, addr, oi, retaddr);
     qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
     return ret;
 }
 
-uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                            int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_UB, full_ldub_mmu);
-}
-
-int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                       int mmu_idx, uintptr_t ra)
-{
-    return (int8_t)cpu_ldub_mmuidx_ra(env, addr, mmu_idx, ra);
-}
-
-uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                               int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_BEUW, full_be_lduw_mmu);
-}
-
-int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                          int mmu_idx, uintptr_t ra)
-{
-    return (int16_t)cpu_lduw_be_mmuidx_ra(env, addr, mmu_idx, ra);
-}
-
-uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                              int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_BEUL, full_be_ldul_mmu);
-}
-
-uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                              int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_BEQ, helper_be_ldq_mmu);
-}
-
-uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                               int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_LEUW, full_le_lduw_mmu);
-}
-
-int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                          int mmu_idx, uintptr_t ra)
-{
-    return (int16_t)cpu_lduw_le_mmuidx_ra(env, addr, mmu_idx, ra);
-}
-
-uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                              int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_LEUL, full_le_ldul_mmu);
-}
-
-uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
-                              int mmu_idx, uintptr_t ra)
-{
-    return cpu_load_helper(env, addr, mmu_idx, ra, MO_LEQ, helper_le_ldq_mmu);
-}
-
-uint32_t cpu_ldub_data_ra(CPUArchState *env, target_ulong ptr,
-                          uintptr_t retaddr)
-{
-    return cpu_ldub_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-int cpu_ldsb_data_ra(CPUArchState *env, target_ulong ptr, uintptr_t retaddr)
-{
-    return cpu_ldsb_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint32_t cpu_lduw_be_data_ra(CPUArchState *env, target_ulong ptr,
-                             uintptr_t retaddr)
-{
-    return cpu_lduw_be_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-int cpu_ldsw_be_data_ra(CPUArchState *env, target_ulong ptr, uintptr_t retaddr)
-{
-    return cpu_ldsw_be_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint32_t cpu_ldl_be_data_ra(CPUArchState *env, target_ulong ptr,
-                            uintptr_t retaddr)
-{
-    return cpu_ldl_be_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint64_t cpu_ldq_be_data_ra(CPUArchState *env, target_ulong ptr,
-                            uintptr_t retaddr)
-{
-    return cpu_ldq_be_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint32_t cpu_lduw_le_data_ra(CPUArchState *env, target_ulong ptr,
-                             uintptr_t retaddr)
-{
-    return cpu_lduw_le_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-int cpu_ldsw_le_data_ra(CPUArchState *env, target_ulong ptr, uintptr_t retaddr)
-{
-    return cpu_ldsw_le_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint32_t cpu_ldl_le_data_ra(CPUArchState *env, target_ulong ptr,
-                            uintptr_t retaddr)
-{
-    return cpu_ldl_le_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint64_t cpu_ldq_le_data_ra(CPUArchState *env, target_ulong ptr,
-                            uintptr_t retaddr)
-{
-    return cpu_ldq_le_mmuidx_ra(env, ptr, cpu_mmu_index(env, false), retaddr);
-}
-
-uint32_t cpu_ldub_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldub_data_ra(env, ptr, 0);
-}
-
-int cpu_ldsb_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldsb_data_ra(env, ptr, 0);
-}
-
-uint32_t cpu_lduw_be_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_lduw_be_data_ra(env, ptr, 0);
-}
-
-int cpu_ldsw_be_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldsw_be_data_ra(env, ptr, 0);
-}
-
-uint32_t cpu_ldl_be_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldl_be_data_ra(env, ptr, 0);
-}
-
-uint64_t cpu_ldq_be_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldq_be_data_ra(env, ptr, 0);
-}
-
-uint32_t cpu_lduw_le_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_lduw_le_data_ra(env, ptr, 0);
-}
-
-int cpu_ldsw_le_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldsw_le_data_ra(env, ptr, 0);
-}
-
-uint32_t cpu_ldl_le_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldl_le_data_ra(env, ptr, 0);
-}
-
-uint64_t cpu_ldq_le_data(CPUArchState *env, target_ulong ptr)
-{
-    return cpu_ldq_le_data_ra(env, ptr, 0);
-}
+uint8_t cpu_ldb_mmu(CPUArchState *env, abi_ptr addr, MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, full_ldub_mmu);
+}
+
+uint16_t cpu_ldw_be_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, full_be_lduw_mmu);
+}
+
+uint32_t cpu_ldl_be_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, full_be_ldul_mmu);
+}
+
+uint64_t cpu_ldq_be_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, MO_BEQ, helper_be_ldq_mmu);
+}
+
+uint16_t cpu_ldw_le_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, full_le_lduw_mmu);
+}
+
+uint32_t cpu_ldl_le_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, full_le_ldul_mmu);
+}
+
+uint64_t cpu_ldq_le_mmu(CPUArchState *env, abi_ptr addr,
+                        MemOpIdx oi, uintptr_t ra)
+{
+    return cpu_load_helper(env, addr, oi, ra, helper_le_ldq_mmu);
+}
 
 /*
@@ -2324,6 +2220,9 @@ store_memop(void *haddr, uint64_t val, MemOp op)
     }
 }
 
+static void full_stb_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
+                         MemOpIdx oi, uintptr_t retaddr);
+
 static void __attribute__((noinline))
 store_helper_unaligned(CPUArchState *env, target_ulong addr, uint64_t val,
                        uintptr_t retaddr, size_t size, uintptr_t mmu_idx,
@@ -2387,13 +2286,13 @@ store_helper_unaligned(CPUArchState *env, target_ulong addr, uint64_t val,
         for (i = 0; i < size; ++i) {
             /* Big-endian extract. */
             uint8_t val8 = val >> (((size - 1) * 8) - (i * 8));
-            helper_ret_stb_mmu(env, addr + i, val8, oi, retaddr);
+            full_stb_mmu(env, addr + i, val8, oi, retaddr);
         }
     } else {
         for (i = 0; i < size; ++i) {
             /* Little-endian extract. */
            uint8_t val8 = val >> (i * 8);
-            helper_ret_stb_mmu(env, addr + i, val8, oi, retaddr);
+            full_stb_mmu(env, addr + i, val8, oi, retaddr);
        }
    }
 }
@ -2496,46 +2395,83 @@ store_helper(CPUArchState *env, target_ulong addr, uint64_t val,
store_memop(haddr, val, op); store_memop(haddr, val, op);
} }
void __attribute__((noinline)) static void __attribute__((noinline))
helper_ret_stb_mmu(CPUArchState *env, target_ulong addr, uint8_t val, full_stb_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
validate_memop(oi, MO_UB);
store_helper(env, addr, val, oi, retaddr, MO_UB); store_helper(env, addr, val, oi, retaddr, MO_UB);
} }
void helper_ret_stb_mmu(CPUArchState *env, target_ulong addr, uint8_t val,
MemOpIdx oi, uintptr_t retaddr)
{
full_stb_mmu(env, addr, val, oi, retaddr);
}
static void full_le_stw_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr)
{
validate_memop(oi, MO_LEUW);
store_helper(env, addr, val, oi, retaddr, MO_LEUW);
}
void helper_le_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val, void helper_le_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
store_helper(env, addr, val, oi, retaddr, MO_LEUW); full_le_stw_mmu(env, addr, val, oi, retaddr);
}
static void full_be_stw_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr)
{
validate_memop(oi, MO_BEUW);
store_helper(env, addr, val, oi, retaddr, MO_BEUW);
} }
void helper_be_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val, void helper_be_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
store_helper(env, addr, val, oi, retaddr, MO_BEUW); full_be_stw_mmu(env, addr, val, oi, retaddr);
}
static void full_le_stl_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr)
{
validate_memop(oi, MO_LEUL);
store_helper(env, addr, val, oi, retaddr, MO_LEUL);
} }
void helper_le_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val, void helper_le_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
store_helper(env, addr, val, oi, retaddr, MO_LEUL); full_le_stl_mmu(env, addr, val, oi, retaddr);
}
static void full_be_stl_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr)
{
validate_memop(oi, MO_BEUL);
store_helper(env, addr, val, oi, retaddr, MO_BEUL);
} }
void helper_be_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val, void helper_be_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
store_helper(env, addr, val, oi, retaddr, MO_BEUL); full_be_stl_mmu(env, addr, val, oi, retaddr);
} }
void helper_le_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val, void helper_le_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
validate_memop(oi, MO_LEQ);
store_helper(env, addr, val, oi, retaddr, MO_LEQ); store_helper(env, addr, val, oi, retaddr, MO_LEQ);
} }
void helper_be_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val, void helper_be_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
MemOpIdx oi, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
validate_memop(oi, MO_BEQ);
store_helper(env, addr, val, oi, retaddr, MO_BEQ); store_helper(env, addr, val, oi, retaddr, MO_BEQ);
} }
@ -2543,137 +2479,61 @@ void helper_be_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
* Store Helpers for cpu_ldst.h * Store Helpers for cpu_ldst.h
*/ */
static inline void QEMU_ALWAYS_INLINE typedef void FullStoreHelper(CPUArchState *env, target_ulong addr,
cpu_store_helper(CPUArchState *env, target_ulong addr, uint64_t val, uint64_t val, MemOpIdx oi, uintptr_t retaddr);
int mmu_idx, uintptr_t retaddr, MemOp op)
static inline void cpu_store_helper(CPUArchState *env, target_ulong addr,
uint64_t val, MemOpIdx oi, uintptr_t ra,
FullStoreHelper *full_store)
{ {
MemOpIdx oi = make_memop_idx(op, mmu_idx);
trace_guest_st_before_exec(env_cpu(env), addr, oi); trace_guest_st_before_exec(env_cpu(env), addr, oi);
full_store(env, addr, val, oi, ra);
store_helper(env, addr, val, oi, retaddr, op);
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W); qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
void cpu_stb_mmuidx_ra(CPUArchState *env, target_ulong addr, uint32_t val, void cpu_stb_mmu(CPUArchState *env, target_ulong addr, uint8_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_UB); cpu_store_helper(env, addr, val, oi, retaddr, full_stb_mmu);
} }
void cpu_stw_be_mmuidx_ra(CPUArchState *env, target_ulong addr, uint32_t val, void cpu_stw_be_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_BEUW); cpu_store_helper(env, addr, val, oi, retaddr, full_be_stw_mmu);
} }
void cpu_stl_be_mmuidx_ra(CPUArchState *env, target_ulong addr, uint32_t val, void cpu_stl_be_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_BEUL); cpu_store_helper(env, addr, val, oi, retaddr, full_be_stl_mmu);
} }
void cpu_stq_be_mmuidx_ra(CPUArchState *env, target_ulong addr, uint64_t val, void cpu_stq_be_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_BEQ); cpu_store_helper(env, addr, val, oi, retaddr, helper_be_stq_mmu);
} }
void cpu_stw_le_mmuidx_ra(CPUArchState *env, target_ulong addr, uint32_t val, void cpu_stw_le_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_LEUW); cpu_store_helper(env, addr, val, oi, retaddr, full_le_stw_mmu);
} }
void cpu_stl_le_mmuidx_ra(CPUArchState *env, target_ulong addr, uint32_t val, void cpu_stl_le_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_LEUL); cpu_store_helper(env, addr, val, oi, retaddr, full_le_stl_mmu);
} }
void cpu_stq_le_mmuidx_ra(CPUArchState *env, target_ulong addr, uint64_t val, void cpu_stq_le_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
int mmu_idx, uintptr_t retaddr) MemOpIdx oi, uintptr_t retaddr)
{ {
cpu_store_helper(env, addr, val, mmu_idx, retaddr, MO_LEQ); cpu_store_helper(env, addr, val, oi, retaddr, helper_le_stq_mmu);
} }
void cpu_stb_data_ra(CPUArchState *env, target_ulong ptr, #include "ldst_common.c.inc"
uint32_t val, uintptr_t retaddr)
{
cpu_stb_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stw_be_data_ra(CPUArchState *env, target_ulong ptr,
uint32_t val, uintptr_t retaddr)
{
cpu_stw_be_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stl_be_data_ra(CPUArchState *env, target_ulong ptr,
uint32_t val, uintptr_t retaddr)
{
cpu_stl_be_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stq_be_data_ra(CPUArchState *env, target_ulong ptr,
uint64_t val, uintptr_t retaddr)
{
cpu_stq_be_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stw_le_data_ra(CPUArchState *env, target_ulong ptr,
uint32_t val, uintptr_t retaddr)
{
cpu_stw_le_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stl_le_data_ra(CPUArchState *env, target_ulong ptr,
uint32_t val, uintptr_t retaddr)
{
cpu_stl_le_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stq_le_data_ra(CPUArchState *env, target_ulong ptr,
uint64_t val, uintptr_t retaddr)
{
cpu_stq_le_mmuidx_ra(env, ptr, val, cpu_mmu_index(env, false), retaddr);
}
void cpu_stb_data(CPUArchState *env, target_ulong ptr, uint32_t val)
{
cpu_stb_data_ra(env, ptr, val, 0);
}
void cpu_stw_be_data(CPUArchState *env, target_ulong ptr, uint32_t val)
{
cpu_stw_be_data_ra(env, ptr, val, 0);
}
void cpu_stl_be_data(CPUArchState *env, target_ulong ptr, uint32_t val)
{
cpu_stl_be_data_ra(env, ptr, val, 0);
}
void cpu_stq_be_data(CPUArchState *env, target_ulong ptr, uint64_t val)
{
cpu_stq_be_data_ra(env, ptr, val, 0);
}
void cpu_stw_le_data(CPUArchState *env, target_ulong ptr, uint32_t val)
{
cpu_stw_le_data_ra(env, ptr, val, 0);
}
void cpu_stl_le_data(CPUArchState *env, target_ulong ptr, uint32_t val)
{
cpu_stl_le_data_ra(env, ptr, val, 0);
}
void cpu_stq_le_data(CPUArchState *env, target_ulong ptr, uint64_t val)
{
cpu_stq_le_data_ra(env, ptr, val, 0);
}
/* /*
* First set of functions passes in OI and RETADDR. * First set of functions passes in OI and RETADDR.

accel/tcg/ldst_common.c.inc (new file, 307 lines)

@@ -0,0 +1,307 @@
/*
* Routines common to user and system emulation of load/store.
*
* Copyright (c) 2003 Fabrice Bellard
*
* SPDX-License-Identifier: GPL-2.0-or-later
*
* This work is licensed under the terms of the GNU GPL, version 2 or later.
* See the COPYING file in the top-level directory.
*/
uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_UB, mmu_idx);
return cpu_ldb_mmu(env, addr, oi, ra);
}
int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return (int8_t)cpu_ldub_mmuidx_ra(env, addr, mmu_idx, ra);
}
uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEUW | MO_UNALN, mmu_idx);
return cpu_ldw_be_mmu(env, addr, oi, ra);
}
int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return (int16_t)cpu_lduw_be_mmuidx_ra(env, addr, mmu_idx, ra);
}
uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEUL | MO_UNALN, mmu_idx);
return cpu_ldl_be_mmu(env, addr, oi, ra);
}
uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEQ | MO_UNALN, mmu_idx);
return cpu_ldq_be_mmu(env, addr, oi, ra);
}
uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEUW | MO_UNALN, mmu_idx);
return cpu_ldw_le_mmu(env, addr, oi, ra);
}
int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return (int16_t)cpu_lduw_le_mmuidx_ra(env, addr, mmu_idx, ra);
}
uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEUL | MO_UNALN, mmu_idx);
return cpu_ldl_le_mmu(env, addr, oi, ra);
}
uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEQ | MO_UNALN, mmu_idx);
return cpu_ldq_le_mmu(env, addr, oi, ra);
}
void cpu_stb_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_UB, mmu_idx);
cpu_stb_mmu(env, addr, val, oi, ra);
}
void cpu_stw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEUW | MO_UNALN, mmu_idx);
cpu_stw_be_mmu(env, addr, val, oi, ra);
}
void cpu_stl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEUL | MO_UNALN, mmu_idx);
cpu_stl_be_mmu(env, addr, val, oi, ra);
}
void cpu_stq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_BEQ | MO_UNALN, mmu_idx);
cpu_stq_be_mmu(env, addr, val, oi, ra);
}
void cpu_stw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEUW | MO_UNALN, mmu_idx);
cpu_stw_le_mmu(env, addr, val, oi, ra);
}
void cpu_stl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEUL | MO_UNALN, mmu_idx);
cpu_stl_le_mmu(env, addr, val, oi, ra);
}
void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
int mmu_idx, uintptr_t ra)
{
MemOpIdx oi = make_memop_idx(MO_LEQ | MO_UNALN, mmu_idx);
cpu_stq_le_mmu(env, addr, val, oi, ra);
}
/*--------------------------*/
uint32_t cpu_ldub_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_ldub_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
int cpu_ldsb_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return (int8_t)cpu_ldub_data_ra(env, addr, ra);
}
uint32_t cpu_lduw_be_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_lduw_be_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
int cpu_ldsw_be_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return (int16_t)cpu_lduw_be_data_ra(env, addr, ra);
}
uint32_t cpu_ldl_be_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_ldl_be_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
uint64_t cpu_ldq_be_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_ldq_be_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
uint32_t cpu_lduw_le_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_lduw_le_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
int cpu_ldsw_le_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return (int16_t)cpu_lduw_le_data_ra(env, addr, ra);
}
uint32_t cpu_ldl_le_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_ldl_le_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
uint64_t cpu_ldq_le_data_ra(CPUArchState *env, abi_ptr addr, uintptr_t ra)
{
return cpu_ldq_le_mmuidx_ra(env, addr, cpu_mmu_index(env, false), ra);
}
void cpu_stb_data_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, uintptr_t ra)
{
cpu_stb_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stw_be_data_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, uintptr_t ra)
{
cpu_stw_be_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stl_be_data_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, uintptr_t ra)
{
cpu_stl_be_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stq_be_data_ra(CPUArchState *env, abi_ptr addr,
uint64_t val, uintptr_t ra)
{
cpu_stq_be_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stw_le_data_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, uintptr_t ra)
{
cpu_stw_le_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, uintptr_t ra)
{
cpu_stl_le_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
void cpu_stq_le_data_ra(CPUArchState *env, abi_ptr addr,
uint64_t val, uintptr_t ra)
{
cpu_stq_le_mmuidx_ra(env, addr, val, cpu_mmu_index(env, false), ra);
}
/*--------------------------*/
uint32_t cpu_ldub_data(CPUArchState *env, abi_ptr addr)
{
return cpu_ldub_data_ra(env, addr, 0);
}
int cpu_ldsb_data(CPUArchState *env, abi_ptr addr)
{
return (int8_t)cpu_ldub_data(env, addr);
}
uint32_t cpu_lduw_be_data(CPUArchState *env, abi_ptr addr)
{
return cpu_lduw_be_data_ra(env, addr, 0);
}
int cpu_ldsw_be_data(CPUArchState *env, abi_ptr addr)
{
return (int16_t)cpu_lduw_be_data(env, addr);
}
uint32_t cpu_ldl_be_data(CPUArchState *env, abi_ptr addr)
{
return cpu_ldl_be_data_ra(env, addr, 0);
}
uint64_t cpu_ldq_be_data(CPUArchState *env, abi_ptr addr)
{
return cpu_ldq_be_data_ra(env, addr, 0);
}
uint32_t cpu_lduw_le_data(CPUArchState *env, abi_ptr addr)
{
return cpu_lduw_le_data_ra(env, addr, 0);
}
int cpu_ldsw_le_data(CPUArchState *env, abi_ptr addr)
{
return (int16_t)cpu_lduw_le_data(env, addr);
}
uint32_t cpu_ldl_le_data(CPUArchState *env, abi_ptr addr)
{
return cpu_ldl_le_data_ra(env, addr, 0);
}
uint64_t cpu_ldq_le_data(CPUArchState *env, abi_ptr addr)
{
return cpu_ldq_le_data_ra(env, addr, 0);
}
void cpu_stb_data(CPUArchState *env, abi_ptr addr, uint32_t val)
{
cpu_stb_data_ra(env, addr, val, 0);
}
void cpu_stw_be_data(CPUArchState *env, abi_ptr addr, uint32_t val)
{
cpu_stw_be_data_ra(env, addr, val, 0);
}
void cpu_stl_be_data(CPUArchState *env, abi_ptr addr, uint32_t val)
{
cpu_stl_be_data_ra(env, addr, val, 0);
}
void cpu_stq_be_data(CPUArchState *env, abi_ptr addr, uint64_t val)
{
cpu_stq_be_data_ra(env, addr, val, 0);
}
void cpu_stw_le_data(CPUArchState *env, abi_ptr addr, uint32_t val)
{
cpu_stw_le_data_ra(env, addr, val, 0);
}
void cpu_stl_le_data(CPUArchState *env, abi_ptr addr, uint32_t val)
{
cpu_stl_le_data_ra(env, addr, val, 0);
}
void cpu_stq_le_data(CPUArchState *env, abi_ptr addr, uint64_t val)
{
cpu_stq_le_data_ra(env, addr, val, 0);
}

accel/tcg/user-exec.c

@ -886,300 +886,227 @@ int cpu_signal_handler(int host_signum, void *pinfo,
/* The softmmu versions of these helpers are in cputlb.c. */ /* The softmmu versions of these helpers are in cputlb.c. */
uint32_t cpu_ldub_data(CPUArchState *env, abi_ptr ptr) /*
* Verify that we have passed the correct MemOp to the correct function.
*
* We could present one function to target code, and dispatch based on
* the MemOp, but so far we have worked hard to avoid an indirect function
* call along the memory path.
*/
static void validate_memop(MemOpIdx oi, MemOp expected)
{ {
MemOpIdx oi = make_memop_idx(MO_UB, MMU_USER_IDX); #ifdef CONFIG_DEBUG_TCG
uint32_t ret; MemOp have = get_memop(oi) & (MO_SIZE | MO_BSWAP);
assert(have == expected);
#endif
}
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); static void *cpu_mmu_lookup(CPUArchState *env, target_ulong addr,
ret = ldub_p(g2h(env_cpu(env), ptr)); MemOpIdx oi, uintptr_t ra, MMUAccessType type)
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R); {
void *ret;
/* TODO: Enforce guest required alignment. */
ret = g2h(env_cpu(env), addr);
set_helper_retaddr(ra);
return ret; return ret;
} }
int cpu_ldsb_data(CPUArchState *env, abi_ptr ptr) uint8_t cpu_ldb_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{ {
return (int8_t)cpu_ldub_data(env, ptr); void *haddr;
} uint8_t ret;
uint32_t cpu_lduw_be_data(CPUArchState *env, abi_ptr ptr) validate_memop(oi, MO_UB);
{ trace_guest_ld_before_exec(env_cpu(env), addr, oi);
MemOpIdx oi = make_memop_idx(MO_BEUW, MMU_USER_IDX); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
uint32_t ret; ret = ldub_p(haddr);
clear_helper_retaddr();
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
ret = lduw_be_p(g2h(env_cpu(env), ptr));
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R);
return ret; return ret;
} }
int cpu_ldsw_be_data(CPUArchState *env, abi_ptr ptr) uint16_t cpu_ldw_be_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{ {
return (int16_t)cpu_lduw_be_data(env, ptr); void *haddr;
} uint16_t ret;
uint32_t cpu_ldl_be_data(CPUArchState *env, abi_ptr ptr) validate_memop(oi, MO_BEUW);
{ trace_guest_ld_before_exec(env_cpu(env), addr, oi);
MemOpIdx oi = make_memop_idx(MO_BEUL, MMU_USER_IDX); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
uint32_t ret; ret = lduw_be_p(haddr);
clear_helper_retaddr();
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
ret = ldl_be_p(g2h(env_cpu(env), ptr));
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R);
return ret; return ret;
} }
uint64_t cpu_ldq_be_data(CPUArchState *env, abi_ptr ptr) uint32_t cpu_ldl_be_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{ {
MemOpIdx oi = make_memop_idx(MO_BEQ, MMU_USER_IDX); void *haddr;
uint32_t ret;
validate_memop(oi, MO_BEUL);
trace_guest_ld_before_exec(env_cpu(env), addr, oi);
haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
ret = ldl_be_p(haddr);
clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
return ret;
}
uint64_t cpu_ldq_be_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{
void *haddr;
uint64_t ret; uint64_t ret;
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); validate_memop(oi, MO_BEQ);
ret = ldq_be_p(g2h(env_cpu(env), ptr)); trace_guest_ld_before_exec(env_cpu(env), addr, oi);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
ret = ldq_be_p(haddr);
clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
return ret; return ret;
} }
uint32_t cpu_lduw_le_data(CPUArchState *env, abi_ptr ptr) uint16_t cpu_ldw_le_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{ {
MemOpIdx oi = make_memop_idx(MO_LEUW, MMU_USER_IDX); void *haddr;
uint16_t ret;
validate_memop(oi, MO_LEUW);
trace_guest_ld_before_exec(env_cpu(env), addr, oi);
haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
ret = lduw_le_p(haddr);
clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
return ret;
}
uint32_t cpu_ldl_le_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{
void *haddr;
uint32_t ret; uint32_t ret;
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); validate_memop(oi, MO_LEUL);
ret = lduw_le_p(g2h(env_cpu(env), ptr)); trace_guest_ld_before_exec(env_cpu(env), addr, oi);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
ret = ldl_le_p(haddr);
clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
return ret; return ret;
} }
int cpu_ldsw_le_data(CPUArchState *env, abi_ptr ptr) uint64_t cpu_ldq_le_mmu(CPUArchState *env, abi_ptr addr,
MemOpIdx oi, uintptr_t ra)
{ {
return (int16_t)cpu_lduw_le_data(env, ptr); void *haddr;
}
uint32_t cpu_ldl_le_data(CPUArchState *env, abi_ptr ptr)
{
MemOpIdx oi = make_memop_idx(MO_LEUL, MMU_USER_IDX);
uint32_t ret;
trace_guest_ld_before_exec(env_cpu(env), ptr, oi);
ret = ldl_le_p(g2h(env_cpu(env), ptr));
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R);
return ret;
}
uint64_t cpu_ldq_le_data(CPUArchState *env, abi_ptr ptr)
{
MemOpIdx oi = make_memop_idx(MO_LEQ, MMU_USER_IDX);
uint64_t ret; uint64_t ret;
trace_guest_ld_before_exec(env_cpu(env), ptr, oi); validate_memop(oi, MO_LEQ);
ret = ldq_le_p(g2h(env_cpu(env), ptr)); trace_guest_ld_before_exec(env_cpu(env), addr, oi);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_R); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_LOAD);
ret = ldq_le_p(haddr);
clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_R);
return ret; return ret;
} }
uint32_t cpu_ldub_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stb_mmu(CPUArchState *env, abi_ptr addr, uint8_t val,
MemOpIdx oi, uintptr_t ra)
{ {
uint32_t ret; void *haddr;
set_helper_retaddr(retaddr); validate_memop(oi, MO_UB);
ret = cpu_ldub_data(env, ptr); trace_guest_st_before_exec(env_cpu(env), addr, oi);
haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stb_p(haddr, val);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
int cpu_ldsb_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stw_be_mmu(CPUArchState *env, abi_ptr addr, uint16_t val,
MemOpIdx oi, uintptr_t ra)
{ {
return (int8_t)cpu_ldub_data_ra(env, ptr, retaddr); void *haddr;
}
uint32_t cpu_lduw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) validate_memop(oi, MO_BEUW);
{ trace_guest_st_before_exec(env_cpu(env), addr, oi);
uint32_t ret; haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stw_be_p(haddr, val);
set_helper_retaddr(retaddr);
ret = cpu_lduw_be_data(env, ptr);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
int cpu_ldsw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stl_be_mmu(CPUArchState *env, abi_ptr addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{ {
return (int16_t)cpu_lduw_be_data_ra(env, ptr, retaddr); void *haddr;
}
uint32_t cpu_ldl_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) validate_memop(oi, MO_BEUL);
{ trace_guest_st_before_exec(env_cpu(env), addr, oi);
uint32_t ret; haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stl_be_p(haddr, val);
set_helper_retaddr(retaddr);
ret = cpu_ldl_be_data(env, ptr);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
uint64_t cpu_ldq_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stq_be_mmu(CPUArchState *env, abi_ptr addr, uint64_t val,
MemOpIdx oi, uintptr_t ra)
{ {
uint64_t ret; void *haddr;
set_helper_retaddr(retaddr); validate_memop(oi, MO_BEQ);
ret = cpu_ldq_be_data(env, ptr); trace_guest_st_before_exec(env_cpu(env), addr, oi);
haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stq_be_p(haddr, val);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
uint32_t cpu_lduw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stw_le_mmu(CPUArchState *env, abi_ptr addr, uint16_t val,
MemOpIdx oi, uintptr_t ra)
{ {
uint32_t ret; void *haddr;
set_helper_retaddr(retaddr); validate_memop(oi, MO_LEUW);
ret = cpu_lduw_le_data(env, ptr); trace_guest_st_before_exec(env_cpu(env), addr, oi);
haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stw_le_p(haddr, val);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
int cpu_ldsw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stl_le_mmu(CPUArchState *env, abi_ptr addr, uint32_t val,
MemOpIdx oi, uintptr_t ra)
{ {
return (int16_t)cpu_lduw_le_data_ra(env, ptr, retaddr); void *haddr;
}
uint32_t cpu_ldl_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) validate_memop(oi, MO_LEUL);
{ trace_guest_st_before_exec(env_cpu(env), addr, oi);
uint32_t ret; haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
stl_le_p(haddr, val);
set_helper_retaddr(retaddr);
ret = cpu_ldl_le_data(env, ptr);
clear_helper_retaddr(); clear_helper_retaddr();
return ret; qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
uint64_t cpu_ldq_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t retaddr) void cpu_stq_le_mmu(CPUArchState *env, abi_ptr addr, uint64_t val,
MemOpIdx oi, uintptr_t ra)
{ {
uint64_t ret; void *haddr;
set_helper_retaddr(retaddr); validate_memop(oi, MO_LEQ);
ret = cpu_ldq_le_data(env, ptr); trace_guest_st_before_exec(env_cpu(env), addr, oi);
clear_helper_retaddr(); haddr = cpu_mmu_lookup(env, addr, oi, ra, MMU_DATA_STORE);
return ret; stq_le_p(haddr, val);
}
void cpu_stb_data(CPUArchState *env, abi_ptr ptr, uint32_t val)
{
MemOpIdx oi = make_memop_idx(MO_UB, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stb_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stw_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val)
{
MemOpIdx oi = make_memop_idx(MO_BEUW, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stw_be_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stl_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val)
{
MemOpIdx oi = make_memop_idx(MO_BEUL, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stl_be_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stq_be_data(CPUArchState *env, abi_ptr ptr, uint64_t val)
{
MemOpIdx oi = make_memop_idx(MO_BEQ, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stq_be_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stw_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val)
{
MemOpIdx oi = make_memop_idx(MO_LEUW, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stw_le_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stl_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val)
{
MemOpIdx oi = make_memop_idx(MO_LEUL, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stl_le_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stq_le_data(CPUArchState *env, abi_ptr ptr, uint64_t val)
{
MemOpIdx oi = make_memop_idx(MO_LEQ, MMU_USER_IDX);
trace_guest_st_before_exec(env_cpu(env), ptr, oi);
stq_le_p(g2h(env_cpu(env), ptr), val);
qemu_plugin_vcpu_mem_cb(env_cpu(env), ptr, oi, QEMU_PLUGIN_MEM_W);
}
void cpu_stb_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stb_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stw_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stw_be_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stl_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stl_be_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stq_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint64_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stq_be_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stw_le_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stw_le_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stl_le_data(env, ptr, val);
clear_helper_retaddr();
}
void cpu_stq_le_data_ra(CPUArchState *env, abi_ptr ptr,
uint64_t val, uintptr_t retaddr)
{
set_helper_retaddr(retaddr);
cpu_stq_le_data(env, ptr, val);
clear_helper_retaddr(); clear_helper_retaddr();
qemu_plugin_vcpu_mem_cb(env_cpu(env), addr, oi, QEMU_PLUGIN_MEM_W);
} }
uint32_t cpu_ldub_code(CPUArchState *env, abi_ptr ptr) uint32_t cpu_ldub_code(CPUArchState *env, abi_ptr ptr)
@ -1222,6 +1149,8 @@ uint64_t cpu_ldq_code(CPUArchState *env, abi_ptr ptr)
return ret; return ret;
} }
#include "ldst_common.c.inc"
/* /*
* Do not allow unaligned operations to proceed. Return the host address. * Do not allow unaligned operations to proceed. Return the host address.
* *

docs/devel/loads-stores.rst

@@ -68,15 +68,19 @@ Regexes for git grep
 - ``\<ldn_\([hbl]e\)?_p\>``
 - ``\<stn_\([hbl]e\)?_p\>``
 
-``cpu_{ld,st}*_mmuidx_ra``
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+``cpu_{ld,st}*_mmu``
+~~~~~~~~~~~~~~~~~~~~
 
-These functions operate on a guest virtual address plus a context,
-known as a "mmu index" or ``mmuidx``, which controls how that virtual
-address is translated.  The meaning of the indexes are target specific,
-but specifying a particular index might be necessary if, for instance,
-the helper requires an "always as non-privileged" access rather that
-the default access for the current state of the guest CPU.
+These functions operate on a guest virtual address, plus a context
+known as a "mmu index" which controls how that virtual address is
+translated, plus a ``MemOp`` which contains alignment requirements
+among other things.  The ``MemOp`` and mmu index are combined into
+a single argument of type ``MemOpIdx``.
+
+The meaning of the indexes are target specific, but specifying a
+particular index might be necessary if, for instance, the helper
+requires a "always as non-privileged" access rather than the
+default access for the current state of the guest CPU.
 
 These functions may cause a guest CPU exception to be taken
 (e.g. for an alignment fault or MMU fault) which will result in
@@ -99,6 +103,35 @@ function, which is a return address into the generated code [#gpc]_.
 
 Function names follow the pattern:
 
+load: ``cpu_ld{size}{end}_mmu(env, ptr, oi, retaddr)``
+
+store: ``cpu_st{size}{end}_mmu(env, ptr, val, oi, retaddr)``
+
+``size``
+ - ``b`` : 8 bits
+ - ``w`` : 16 bits
+ - ``l`` : 32 bits
+ - ``q`` : 64 bits
+
+``end``
+ - (empty) : for target endian, or 8 bit sizes
+ - ``_be`` : big endian
+ - ``_le`` : little endian
+
+Regexes for git grep:
+ - ``\<cpu_ld[bwlq](_[bl]e)\?_mmu\>``
+ - ``\<cpu_st[bwlq](_[bl]e)\?_mmu\>``
+
+
+``cpu_{ld,st}*_mmuidx_ra``
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+These functions work like the ``cpu_{ld,st}_mmu`` functions except
+that the ``mmuidx`` parameter is not combined with a ``MemOp``,
+and therefore there is no required alignment supplied or enforced.
+
+Function names follow the pattern:
+
 load: ``cpu_ld{sign}{size}{end}_mmuidx_ra(env, ptr, mmuidx, retaddr)``
 
 store: ``cpu_st{size}{end}_mmuidx_ra(env, ptr, val, mmuidx, retaddr)``
@@ -132,7 +165,8 @@ of the guest CPU, as determined by ``cpu_mmu_index(env, false)``.
 
 These are generally the preferred way to do accesses by guest
 virtual address from helper functions, unless the access should
-be performed with a context other than the default.
+be performed with a context other than the default, or alignment
+should be enforced for the access.
 
 Function names follow the pattern:

include/exec/cpu_ldst.h

@@ -28,10 +28,12 @@
  * load: cpu_ld{sign}{size}{end}_{mmusuffix}(env, ptr)
  *       cpu_ld{sign}{size}{end}_{mmusuffix}_ra(env, ptr, retaddr)
  *       cpu_ld{sign}{size}{end}_mmuidx_ra(env, ptr, mmu_idx, retaddr)
+ *       cpu_ld{sign}{size}{end}_mmu(env, ptr, oi, retaddr)
  *
  * store: cpu_st{size}{end}_{mmusuffix}(env, ptr, val)
  *        cpu_st{size}{end}_{mmusuffix}_ra(env, ptr, val, retaddr)
  *        cpu_st{size}{end}_mmuidx_ra(env, ptr, val, mmu_idx, retaddr)
+ *        cpu_st{size}{end}_mmu(env, ptr, val, oi, retaddr)
  *
  * sign is:
  * (empty): for 32 and 64 bit sizes
@@ -53,10 +55,15 @@
  * The "mmuidx" suffix carries an extra mmu_idx argument that specifies
  * the index to use; the "data" and "code" suffixes take the index from
  * cpu_mmu_index().
+ *
+ * The "mmu" suffix carries the full MemOpIdx, with both mmu_idx and the
+ * MemOp including alignment requirements.  The alignment will be enforced.
  */
 #ifndef CPU_LDST_H
 #define CPU_LDST_H
 
+#include "exec/memopidx.h"
+
 #if defined(CONFIG_USER_ONLY)
 /* sparc32plus has 64bit long but 32bit space address
  * this can make bad result with g2h() and h2g()
@ -118,12 +125,10 @@ typedef target_ulong abi_ptr;
uint32_t cpu_ldub_data(CPUArchState *env, abi_ptr ptr); uint32_t cpu_ldub_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsb_data(CPUArchState *env, abi_ptr ptr); int cpu_ldsb_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_lduw_be_data(CPUArchState *env, abi_ptr ptr); uint32_t cpu_lduw_be_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsw_be_data(CPUArchState *env, abi_ptr ptr); int cpu_ldsw_be_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_ldl_be_data(CPUArchState *env, abi_ptr ptr); uint32_t cpu_ldl_be_data(CPUArchState *env, abi_ptr ptr);
uint64_t cpu_ldq_be_data(CPUArchState *env, abi_ptr ptr); uint64_t cpu_ldq_be_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_lduw_le_data(CPUArchState *env, abi_ptr ptr); uint32_t cpu_lduw_le_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsw_le_data(CPUArchState *env, abi_ptr ptr); int cpu_ldsw_le_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_ldl_le_data(CPUArchState *env, abi_ptr ptr); uint32_t cpu_ldl_le_data(CPUArchState *env, abi_ptr ptr);
@ -131,37 +136,31 @@ uint64_t cpu_ldq_le_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_ldub_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint32_t cpu_ldub_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsb_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); int cpu_ldsb_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_lduw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint32_t cpu_lduw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); int cpu_ldsw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_ldl_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint32_t cpu_ldl_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint64_t cpu_ldq_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint64_t cpu_ldq_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_lduw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint32_t cpu_lduw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); int cpu_ldsw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_ldl_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint32_t cpu_ldl_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint64_t cpu_ldq_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra); uint64_t cpu_ldq_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
void cpu_stb_data(CPUArchState *env, abi_ptr ptr, uint32_t val); void cpu_stb_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stw_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val); void cpu_stw_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stl_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val); void cpu_stl_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stq_be_data(CPUArchState *env, abi_ptr ptr, uint64_t val); void cpu_stq_be_data(CPUArchState *env, abi_ptr ptr, uint64_t val);
void cpu_stw_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val); void cpu_stw_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stl_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val); void cpu_stl_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stq_le_data(CPUArchState *env, abi_ptr ptr, uint64_t val); void cpu_stq_le_data(CPUArchState *env, abi_ptr ptr, uint64_t val);
void cpu_stb_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stb_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t ra); uint32_t val, uintptr_t ra);
void cpu_stw_be_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stw_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t ra); uint32_t val, uintptr_t ra);
void cpu_stl_be_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stl_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t ra); uint32_t val, uintptr_t ra);
void cpu_stq_be_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stq_be_data_ra(CPUArchState *env, abi_ptr ptr,
uint64_t val, uintptr_t ra); uint64_t val, uintptr_t ra);
void cpu_stw_le_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stw_le_data_ra(CPUArchState *env, abi_ptr ptr,
uint32_t val, uintptr_t ra); uint32_t val, uintptr_t ra);
void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr ptr,
@ -169,6 +168,71 @@ void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr ptr,
void cpu_stq_le_data_ra(CPUArchState *env, abi_ptr ptr, void cpu_stq_le_data_ra(CPUArchState *env, abi_ptr ptr,
uint64_t val, uintptr_t ra); uint64_t val, uintptr_t ra);
uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
int mmu_idx, uintptr_t ra);
void cpu_stb_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
int mmu_idx, uintptr_t ra);
void cpu_stw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
int mmu_idx, uintptr_t ra);
void cpu_stl_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
int mmu_idx, uintptr_t ra);
void cpu_stq_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint64_t val,
int mmu_idx, uintptr_t ra);
void cpu_stw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
int mmu_idx, uintptr_t ra);
void cpu_stl_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
int mmu_idx, uintptr_t ra);
void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint64_t val,
int mmu_idx, uintptr_t ra);
uint8_t cpu_ldb_mmu(CPUArchState *env, abi_ptr ptr, MemOpIdx oi, uintptr_t ra);
uint16_t cpu_ldw_be_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
uint32_t cpu_ldl_be_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
uint64_t cpu_ldq_be_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
uint16_t cpu_ldw_le_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
uint32_t cpu_ldl_le_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
uint64_t cpu_ldq_le_mmu(CPUArchState *env, abi_ptr ptr,
MemOpIdx oi, uintptr_t ra);
void cpu_stb_mmu(CPUArchState *env, abi_ptr ptr, uint8_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stw_be_mmu(CPUArchState *env, abi_ptr ptr, uint16_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stl_be_mmu(CPUArchState *env, abi_ptr ptr, uint32_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stq_be_mmu(CPUArchState *env, abi_ptr ptr, uint64_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stw_le_mmu(CPUArchState *env, abi_ptr ptr, uint16_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stl_le_mmu(CPUArchState *env, abi_ptr ptr, uint32_t val,
MemOpIdx oi, uintptr_t ra);
void cpu_stq_le_mmu(CPUArchState *env, abi_ptr ptr, uint64_t val,
MemOpIdx oi, uintptr_t ra);
#if defined(CONFIG_USER_ONLY) #if defined(CONFIG_USER_ONLY)
extern __thread uintptr_t helper_retaddr; extern __thread uintptr_t helper_retaddr;
@ -193,119 +257,6 @@ static inline void clear_helper_retaddr(void)
helper_retaddr = 0; helper_retaddr = 0;
} }
/*
* Provide the same *_mmuidx_ra interface as for softmmu.
* The mmu_idx argument is ignored.
*/
static inline uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldub_data_ra(env, addr, ra);
}
static inline int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldsb_data_ra(env, addr, ra);
}
static inline uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_lduw_be_data_ra(env, addr, ra);
}
static inline int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldsw_be_data_ra(env, addr, ra);
}
static inline uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldl_be_data_ra(env, addr, ra);
}
static inline uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldq_be_data_ra(env, addr, ra);
}
static inline uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_lduw_le_data_ra(env, addr, ra);
}
static inline int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldsw_le_data_ra(env, addr, ra);
}
static inline uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldl_le_data_ra(env, addr, ra);
}
static inline uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra)
{
return cpu_ldq_le_data_ra(env, addr, ra);
}
static inline void cpu_stb_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, int mmu_idx, uintptr_t ra)
{
cpu_stb_data_ra(env, addr, val, ra);
}
static inline void cpu_stw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stw_be_data_ra(env, addr, val, ra);
}
static inline void cpu_stl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stl_be_data_ra(env, addr, val, ra);
}
static inline void cpu_stq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint64_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stq_be_data_ra(env, addr, val, ra);
}
static inline void cpu_stw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stw_le_data_ra(env, addr, val, ra);
}
static inline void cpu_stl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint32_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stl_le_data_ra(env, addr, val, ra);
}
static inline void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
uint64_t val, int mmu_idx,
uintptr_t ra)
{
cpu_stq_le_data_ra(env, addr, val, ra);
}
#else #else
/* Needed for TCG_OVERSIZED_GUEST */ /* Needed for TCG_OVERSIZED_GUEST */
@ -336,46 +287,6 @@ static inline CPUTLBEntry *tlb_entry(CPUArchState *env, uintptr_t mmu_idx,
return &env_tlb(env)->f[mmu_idx].table[tlb_index(env, mmu_idx, addr)]; return &env_tlb(env)->f[mmu_idx].table[tlb_index(env, mmu_idx, addr)];
} }
uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr,
int mmu_idx, uintptr_t ra);
void cpu_stb_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stw_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stl_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stq_be_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stw_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stl_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint32_t val,
int mmu_idx, uintptr_t retaddr);
void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
int mmu_idx, uintptr_t retaddr);
#endif /* defined(CONFIG_USER_ONLY) */ #endif /* defined(CONFIG_USER_ONLY) */
#ifdef TARGET_WORDS_BIGENDIAN #ifdef TARGET_WORDS_BIGENDIAN
@ -391,6 +302,9 @@ void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
# define cpu_ldsw_mmuidx_ra cpu_ldsw_be_mmuidx_ra # define cpu_ldsw_mmuidx_ra cpu_ldsw_be_mmuidx_ra
# define cpu_ldl_mmuidx_ra cpu_ldl_be_mmuidx_ra # define cpu_ldl_mmuidx_ra cpu_ldl_be_mmuidx_ra
# define cpu_ldq_mmuidx_ra cpu_ldq_be_mmuidx_ra # define cpu_ldq_mmuidx_ra cpu_ldq_be_mmuidx_ra
# define cpu_ldw_mmu cpu_ldw_be_mmu
# define cpu_ldl_mmu cpu_ldl_be_mmu
# define cpu_ldq_mmu cpu_ldq_be_mmu
# define cpu_stw_data cpu_stw_be_data # define cpu_stw_data cpu_stw_be_data
# define cpu_stl_data cpu_stl_be_data # define cpu_stl_data cpu_stl_be_data
# define cpu_stq_data cpu_stq_be_data # define cpu_stq_data cpu_stq_be_data
@ -400,6 +314,9 @@ void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
# define cpu_stw_mmuidx_ra cpu_stw_be_mmuidx_ra # define cpu_stw_mmuidx_ra cpu_stw_be_mmuidx_ra
# define cpu_stl_mmuidx_ra cpu_stl_be_mmuidx_ra # define cpu_stl_mmuidx_ra cpu_stl_be_mmuidx_ra
# define cpu_stq_mmuidx_ra cpu_stq_be_mmuidx_ra # define cpu_stq_mmuidx_ra cpu_stq_be_mmuidx_ra
# define cpu_stw_mmu cpu_stw_be_mmu
# define cpu_stl_mmu cpu_stl_be_mmu
# define cpu_stq_mmu cpu_stq_be_mmu
#else #else
# define cpu_lduw_data cpu_lduw_le_data # define cpu_lduw_data cpu_lduw_le_data
# define cpu_ldsw_data cpu_ldsw_le_data # define cpu_ldsw_data cpu_ldsw_le_data
@ -413,6 +330,9 @@ void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
# define cpu_ldsw_mmuidx_ra cpu_ldsw_le_mmuidx_ra # define cpu_ldsw_mmuidx_ra cpu_ldsw_le_mmuidx_ra
# define cpu_ldl_mmuidx_ra cpu_ldl_le_mmuidx_ra # define cpu_ldl_mmuidx_ra cpu_ldl_le_mmuidx_ra
# define cpu_ldq_mmuidx_ra cpu_ldq_le_mmuidx_ra # define cpu_ldq_mmuidx_ra cpu_ldq_le_mmuidx_ra
# define cpu_ldw_mmu cpu_ldw_le_mmu
# define cpu_ldl_mmu cpu_ldl_le_mmu
# define cpu_ldq_mmu cpu_ldq_le_mmu
# define cpu_stw_data cpu_stw_le_data # define cpu_stw_data cpu_stw_le_data
# define cpu_stl_data cpu_stl_le_data # define cpu_stl_data cpu_stl_le_data
# define cpu_stq_data cpu_stq_le_data # define cpu_stq_data cpu_stq_le_data
@ -422,6 +342,9 @@ void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr addr, uint64_t val,
# define cpu_stw_mmuidx_ra cpu_stw_le_mmuidx_ra # define cpu_stw_mmuidx_ra cpu_stw_le_mmuidx_ra
# define cpu_stl_mmuidx_ra cpu_stl_le_mmuidx_ra # define cpu_stl_mmuidx_ra cpu_stl_le_mmuidx_ra
# define cpu_stq_mmuidx_ra cpu_stq_le_mmuidx_ra # define cpu_stq_mmuidx_ra cpu_stq_le_mmuidx_ra
# define cpu_stw_mmu cpu_stw_le_mmu
# define cpu_stl_mmu cpu_stl_le_mmu
# define cpu_stq_mmu cpu_stq_le_mmu
#endif #endif
uint32_t cpu_ldub_code(CPUArchState *env, abi_ptr addr); uint32_t cpu_ldub_code(CPUArchState *env, abi_ptr addr);