@@ -2287,7 +2287,9 @@ const struct powerpc_opcode powerpc_opcodes[] = {
{ "vrlw", VX(4, 132), VX_MASK, PPCVEC, { VD, VA, VB } },
{ "vrsqrtefp", VX(4, 330), VX_MASK, PPCVEC, { VD, VB } },
{ "vrldmi", VX(4, 197), VX_MASK, PPCVEC, { VD, VA, VB } },
+{ "vrldnm", VX(4, 453), VX_MASK, PPCVEC, { VD, VA, VB } },
{ "vrlwmi", VX(4, 133), VX_MASK, PPCVEC, { VD, VA, VB} },
+{ "vrlwnm", VX(4, 389), VX_MASK, PPCVEC, { VD, VA, VB } },
{ "vsel", VXA(4, 42), VXA_MASK, PPCVEC, { VD, VA, VB, VC } },
{ "vsl", VX(4, 452), VX_MASK, PPCVEC, { VD, VA, VB } },
{ "vslb", VX(4, 260), VX_MASK, PPCVEC, { VD, VA, VB } },
@@ -327,6 +327,8 @@ DEF_HELPER_3(vrefp, void, env, avr, avr)
DEF_HELPER_3(vrsqrtefp, void, env, avr, avr)
DEF_HELPER_3(vrlwmi, void, avr, avr, avr)
DEF_HELPER_3(vrldmi, void, avr, avr, avr)
+DEF_HELPER_3(vrldnm, void, avr, avr, avr)
+DEF_HELPER_3(vrlwnm, void, avr, avr, avr)
DEF_HELPER_5(vmaddfp, void, env, avr, avr, avr, avr)
DEF_HELPER_5(vnmsubfp, void, env, avr, avr, avr, avr)
DEF_HELPER_3(vexptefp, void, env, avr, avr)
@@ -1776,7 +1776,7 @@ LEFT_ROTATE(64);
#define VRLMI(name, size, element, \
begin_first, begin_last, \
end_first, end_last, \
- shift_first, shift_last) \
+ shift_first, shift_last, insert) \
void helper_##name(ppc_avr_t *r, ppc_avr_t *a, ppc_avr_t *b) \
{ \
int i; \
@@ -1791,19 +1791,39 @@ void helper_##name(ppc_avr_t *r, ppc_avr_t *a, ppc_avr_t *b) \
shift = extract_bits_u##size(src2, shift_first, shift_last);\
rot_val = left_rotate_u##size(src1, shift); \
mask = mask_u##size(begin, end); \
- r->element[i] = (rot_val & mask) | (src3 & ~mask); \
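+ /* insert=1 (vrldmi/vrlwmi): keep target bits outside the mask; */ \
+ /* insert=0 (vrldnm/vrlwnm): clear them (rotate-then-AND). */ \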
+ if (insert) { \
+ r->element[i] = (rot_val & mask) | (src3 & ~mask); \
+ } else { \
+ r->element[i] = (rot_val & mask); \
+ } \
} \
}

VRLMI(vrldmi, 64, u64,
42, 47, /* begin_first, begin_last */
50, 55, /* end_first, end_last */
- 58, 63); /* shift_first, shift_last */
+ 58, 63, /* shift_first, shift_last */
+ 1); /* mask and insert */

VRLMI(vrlwmi, 32, u32,
11, 15, /* begin_first, begin_last */
19, 23, /* end_first, end_last */
- 27, 31); /* shift_first, shift_last */
+ 27, 31, /* shift_first, shift_last */
+ 1); /* mask and insert */
+
+VRLMI(vrldnm, 64, u64,
+ 42, 47, /* begin_first, begin_last */
+ 50, 55, /* end_first, end_last */
+ 58, 63, /* shift_first, shift_last */
+ 0); /* mask only */
+
+VRLMI(vrlwnm, 32, u32,
+ 11, 15, /* begin_first, begin_last */
+ 19, 23, /* end_first, end_last */
+ 27, 31, /* shift_first, shift_last */
+ 0); /* mask only */

void helper_vsel(CPUPPCState *env, ppc_avr_t *r, ppc_avr_t *a, ppc_avr_t *b,
ppc_avr_t *c)
@@ -442,6 +442,11 @@ GEN_VXFORM(vmulesw, 4, 14);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
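+/* vslw (Altivec) and vrlwnm (ISA 3.0) share opcode slot (2, 6); */
+/* the GEN_VXFORM_DUAL handler selects by the CPU's insns_flags. */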
+GEN_VXFORM(vrlwnm, 2, 6);
+GEN_VXFORM_DUAL(vslw, PPC_ALTIVEC, PPC_NONE, \
+ vrlwnm, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsld, 2, 23);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
@@ -496,6 +501,10 @@ GEN_VXFORM(vrldmi, 2, 3);
GEN_VXFORM_DUAL(vrld, PPC_NONE, PPC2_ALTIVEC_207, \
vrldmi, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsl, 2, 7);
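+/* vsl (Altivec) and vrldnm (ISA 3.0) likewise share slot (2, 7). */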
+GEN_VXFORM(vrldnm, 2, 7);
+GEN_VXFORM_DUAL(vsl, PPC_ALTIVEC, PPC_NONE, \
+ vrldnm, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM_ENV(vpkuhum, 7, 0);
GEN_VXFORM_ENV(vpkuwum, 7, 1);
@@ -107,7 +107,7 @@ GEN_VXFORM(vmulesh, 4, 13),
GEN_VXFORM_207(vmulesw, 4, 14),
GEN_VXFORM(vslb, 2, 4),
GEN_VXFORM(vslh, 2, 5),
-GEN_VXFORM(vslw, 2, 6),
+GEN_VXFORM_DUAL(vslw, vrlwnm, 2, 6, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM_207(vsld, 2, 23),
GEN_VXFORM(vsrb, 2, 8),
GEN_VXFORM(vsrh, 2, 9),
@@ -145,7 +145,7 @@ GEN_VXFORM(vrlb, 2, 0),
GEN_VXFORM(vrlh, 2, 1),
GEN_VXFORM_DUAL(vrlw, vrlwmi, 2, 2, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM_DUAL(vrld, vrldmi, 2, 3, PPC_NONE, PPC2_ALTIVEC_207),
-GEN_VXFORM(vsl, 2, 7),
+GEN_VXFORM_DUAL(vsl, vrldnm, 2, 7, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM(vsr, 2, 11),
GEN_VXFORM(vpkuhum, 7, 0),
GEN_VXFORM(vpkuwum, 7, 1),