diff --git a/src/xenia/cpu/backend/x64/x64_sequences.cc b/src/xenia/cpu/backend/x64/x64_sequences.cc
index 25972a244..abbcb2900 100644
--- a/src/xenia/cpu/backend/x64/x64_sequences.cc
+++ b/src/xenia/cpu/backend/x64/x64_sequences.cc
@@ -3070,7 +3070,7 @@ EMITTER(MUL_I8, MATCH(I<OPCODE_MUL, I8<>, I8<>, I8<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     // dest hi, dest low = src * edx
     // TODO(justin): Find a way to shorten this has call
-    if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+    if (e.IsFeatureEnabled(kX64EmitBMI2)) {
       // TODO(benvanik): place src2 in edx?
       if (i.src1.is_constant) {
         assert_true(!i.src2.is_constant);
@@ -3112,7 +3112,7 @@ EMITTER(MUL_I16, MATCH(I<OPCODE_MUL, I16<>, I16<>, I16<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     // dest hi, dest low = src * edx
-    if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+    if (e.IsFeatureEnabled(kX64EmitBMI2)) {
       // TODO(benvanik): place src2 in edx?
       if (i.src1.is_constant) {
         assert_true(!i.src2.is_constant);
@@ -3155,7 +3155,7 @@ EMITTER(MUL_I32, MATCH(I<OPCODE_MUL, I32<>, I32<>, I32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     // dest hi, dest low = src * edx
     // mulx: edx src, 1st op high half, 2nd op low half, 3rd op src2
-    if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+    if (e.IsFeatureEnabled(kX64EmitBMI2)) {
       // TODO(benvanik): place src2 in edx?
       if (i.src1.is_constant) {
         assert_true(!i.src2.is_constant);
@@ -3197,7 +3197,7 @@ EMITTER(MUL_I64, MATCH(I<OPCODE_MUL, I64<>, I64<>, I64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     // dest hi, dest low = src * rdx
-    if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+    if (e.IsFeatureEnabled(kX64EmitBMI2)) {
       // mulx: edx src, 1st op high half, 2nd op low half, 3rd op src2
       // TODO(benvanik): place src2 in edx?
@@ -3285,7 +3285,7 @@ EMITTER(MUL_HI_I8, MATCH(I<OPCODE_MUL_HI, I8<>, I8<>, I8<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     if (i.instr->flags & ARITHMETIC_UNSIGNED) {
       // TODO(justin): Find a way to shorten this has call
-      if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+      if (e.IsFeatureEnabled(kX64EmitBMI2)) {
        // TODO(benvanik): place src1 in eax? still need to sign extend
        e.movzx(e.edx, i.src1);
        e.mulx(i.dest.reg().cvt32(), e.eax, i.src2.reg().cvt32());
@@ -3325,7 +3325,7 @@ EMITTER(MUL_HI_I16, MATCH(I<OPCODE_MUL_HI, I16<>, I16<>, I16<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     if (i.instr->flags & ARITHMETIC_UNSIGNED) {
       // TODO(justin): Find a way to shorten this has call
-      if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+      if (e.IsFeatureEnabled(kX64EmitBMI2)) {
        // TODO(benvanik): place src1 in eax? still need to sign extend
        e.movzx(e.edx, i.src1);
        e.mulx(i.dest.reg().cvt32(), e.eax, i.src2.reg().cvt32());
@@ -3365,7 +3365,7 @@ EMITTER(MUL_HI_I32, MATCH(I<OPCODE_MUL_HI, I32<>, I32<>, I32<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     if (i.instr->flags & ARITHMETIC_UNSIGNED) {
       // TODO(justin): Find a way to shorten this has call
-      if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+      if (e.IsFeatureEnabled(kX64EmitBMI2)) {
        // TODO(benvanik): place src1 in eax? still need to sign extend
        e.mov(e.edx, i.src1);
        if (i.src2.is_constant) {
@@ -3410,7 +3410,7 @@ EMITTER(MUL_HI_I64, MATCH(I<OPCODE_MUL_HI, I64<>, I64<>, I64<>>)) {
   static void Emit(X64Emitter& e, const EmitArgType& i) {
     if (i.instr->flags & ARITHMETIC_UNSIGNED) {
       // TODO(justin): Find a way to shorten this has call
-      if (e.IsFeatureEnabled(kX64EmitAVX2)) {
+      if (e.IsFeatureEnabled(kX64EmitBMI2)) {
        // TODO(benvanik): place src1 in eax? still need to sign extend
        e.mov(e.rdx, i.src1);
        if (i.src2.is_constant) {
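
Note: every hunk above makes the same one-token change, kX64EmitAVX2 -> kX64EmitBMI2, in the guard around the e.mulx(...) fast path. MULX is defined by the BMI2 extension, not by AVX2; the two are independent CPUID feature bits (leaf 7, sub-leaf 0, EBX bit 5 for AVX2 and bit 8 for BMI2), so gating mulx on AVX2 could emit an illegal instruction on a CPU that reports AVX2 without BMI2. Xenia has its own detection behind IsFeatureEnabled(), so the sketch below is not the project's code, just a hedged, self-contained illustration of why the two bits must be tested separately; HasCpuidFeature is a hypothetical helper name.

#include <cstdint>
#include <cstdio>
#if defined(_MSC_VER)
#include <intrin.h>
#else
#include <cpuid.h>
#endif

// Hypothetical helper (not from Xenia): reads CPUID leaf 7, sub-leaf 0,
// and tests one bit of EBX, where both AVX2 and BMI2 are reported.
static bool HasCpuidFeature(uint32_t ebx_bit) {
  uint32_t ebx = 0;
#if defined(_MSC_VER)
  int regs[4];
  __cpuidex(regs, 7, 0);
  ebx = static_cast<uint32_t>(regs[1]);
#else
  uint32_t eax = 0, ecx = 0, edx = 0;
  __get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx);
#endif
  return (ebx >> ebx_bit) & 1u;
}

int main() {
  // AVX2 is EBX bit 5; BMI2 (which provides MULX) is EBX bit 8.
  std::printf("AVX2: %d  BMI2: %d\n",
              HasCpuidFeature(5) ? 1 : 0, HasCpuidFeature(8) ? 1 : 0);
  return 0;
}

On most Haswell-and-later CPUs this prints both flags as 1, but neither bit implies the other, which is why the emitter keys the mulx path off a dedicated kX64EmitBMI2 flag rather than reusing the AVX2 one.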