@@ -1981,6 +1981,42 @@ VSX_DIV(xsdivsp, 1, float64, VsrD(0), 1, 1)
VSX_DIV(xvdivdp, 2, float64, VsrD(i), 0, 0)
VSX_DIV(xvdivsp, 4, float32, VsrW(i), 0, 0)
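
+/* xsdivqp - VSX Scalar Divide Quad-Precision */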
+void helper_xsdivqp(CPUPPCState *env, uint32_t opcode)
+{
+    ppc_vsr_t xt, xa, xb;
+
+    getVSR(rA(opcode) + 32, &xa, env);
+    getVSR(rB(opcode) + 32, &xb, env);
+    getVSR(rD(opcode) + 32, &xt, env);
+
+    if (unlikely(Rc(opcode) != 0)) {
+        /* TODO: Support xsdivqpo after round-to-odd is implemented */
+        abort();
+    }
+
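+    /* Divide with a scratch float_status so we can inspect the flags */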
+    helper_reset_fpstatus(env);
+    float_status tstat = env->fp_status;
+    set_float_exception_flags(0, &tstat);
+    xt.f128 = float128_div(xa.f128, xb.f128, &tstat);
+    env->fp_status.float_exception_flags |= tstat.float_exception_flags;
+
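+    /*
+     * Map the invalid operation back to its FPSCR cause:
+     * inf/inf -> VXIDI, 0/0 -> VXZDZ, SNaN operand -> VXSNAN.
+     */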
+    if (unlikely(tstat.float_exception_flags & float_flag_invalid)) {
+        if (float128_is_infinity(xa.f128) && float128_is_infinity(xb.f128)) {
+            float_invalid_op_excp(env, POWERPC_EXCP_FP_VXIDI, 1);
+        } else if (float128_is_zero(xa.f128) &&
+                   float128_is_zero(xb.f128)) {
+            float_invalid_op_excp(env, POWERPC_EXCP_FP_VXZDZ, 1);
+        } else if (float128_is_signaling_nan(xa.f128, &tstat) ||
+                   float128_is_signaling_nan(xb.f128, &tstat)) {
+            float_invalid_op_excp(env, POWERPC_EXCP_FP_VXSNAN, 1);
+        }
+    }
+
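+    /* Set FPRF, write back the result and post any pending exceptions */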
+    helper_compute_fprf_float128(env, xt.f128);
+    putVSR(rD(opcode) + 32, &xt, env);
+    float_check_status(env);
+}
+
/* VSX_RE - VSX floating point reciprocal estimate
* op - instruction mnemonic
* nels - number of elements (1, 2 or 4)
@@ -404,6 +404,7 @@ DEF_HELPER_2(xsaddqp, void, env, i32)
DEF_HELPER_2(xssubdp, void, env, i32)
DEF_HELPER_2(xsmuldp, void, env, i32)
DEF_HELPER_2(xsdivdp, void, env, i32)
+DEF_HELPER_2(xsdivqp, void, env, i32)
DEF_HELPER_2(xsredp, void, env, i32)
DEF_HELPER_2(xssqrtdp, void, env, i32)
DEF_HELPER_2(xsrsqrtedp, void, env, i32)
@@ -781,6 +781,7 @@ GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
+GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
@@ -196,6 +196,7 @@ GEN_XX3FORM(xsaddsp, 0x00, 0x00, PPC2_VSX207),
GEN_XX3FORM(xssubsp, 0x00, 0x01, PPC2_VSX207),
GEN_XX3FORM(xsmulsp, 0x00, 0x02, PPC2_VSX207),
GEN_XX3FORM(xsdivsp, 0x00, 0x03, PPC2_VSX207),
+GEN_VSX_XFORM_300(xsdivqp, 0x04, 0x11, 0x0),
GEN_XX2FORM(xsresp, 0x14, 0x01, PPC2_VSX207),
GEN_XX2FORM(xsrsp, 0x12, 0x11, PPC2_VSX207),
GEN_XX2FORM(xssqrtsp, 0x16, 0x00, PPC2_VSX207),