| // Code generated by ppc64map -fmt=decoder pp64.csv DO NOT EDIT. |
| |
| package ppc64asm |
| |
// Op constants enumerate the PowerPC opcodes known to this package, in the
// order emitted by ppc64map. The leading blank identifier reserves 0 as an
// invalid/unknown opcode. Names ending in CC are the condition-register
// "record" (Rc=1) forms, printed with a trailing "." (see opstr below).
const (
| _ Op = iota |
| BRD |
| BRH |
| BRW |
| CFUGED |
| CNTLZDM |
| CNTTZDM |
| DCFFIXQQ |
| DCTFIXQQ |
| LXVKQ |
| LXVP |
| LXVPX |
| LXVRBX |
| LXVRDX |
| LXVRHX |
| LXVRWX |
| MTVSRBM |
| MTVSRBMI |
| MTVSRDM |
| MTVSRHM |
| MTVSRQM |
| MTVSRWM |
| PADDI |
| PDEPD |
| PEXTD |
| PLBZ |
| PLD |
| PLFD |
| PLFS |
| PLHA |
| PLHZ |
| PLQ |
| PLWA |
| PLWZ |
| PLXSD |
| PLXSSP |
| PLXV |
| PLXVP |
| PMXVBF16GER2 |
| PMXVBF16GER2NN |
| PMXVBF16GER2NP |
| PMXVBF16GER2PN |
| PMXVBF16GER2PP |
| PMXVF16GER2 |
| PMXVF16GER2NN |
| PMXVF16GER2NP |
| PMXVF16GER2PN |
| PMXVF16GER2PP |
| PMXVF32GER |
| PMXVF32GERNN |
| PMXVF32GERNP |
| PMXVF32GERPN |
| PMXVF32GERPP |
| PMXVF64GER |
| PMXVF64GERNN |
| PMXVF64GERNP |
| PMXVF64GERPN |
| PMXVF64GERPP |
| PMXVI16GER2 |
| PMXVI16GER2PP |
| PMXVI16GER2S |
| PMXVI16GER2SPP |
| PMXVI4GER8 |
| PMXVI4GER8PP |
| PMXVI8GER4 |
| PMXVI8GER4PP |
| PMXVI8GER4SPP |
| PNOP |
| PSTB |
| PSTD |
| PSTFD |
| PSTFS |
| PSTH |
| PSTQ |
| PSTW |
| PSTXSD |
| PSTXSSP |
| PSTXV |
| PSTXVP |
| SETBC |
| SETBCR |
| SETNBC |
| SETNBCR |
| STXVP |
| STXVPX |
| STXVRBX |
| STXVRDX |
| STXVRHX |
| STXVRWX |
| VCFUGED |
| VCLRLB |
| VCLRRB |
| VCLZDM |
| VCMPEQUQ |
| VCMPEQUQCC |
| VCMPGTSQ |
| VCMPGTSQCC |
| VCMPGTUQ |
| VCMPGTUQCC |
| VCMPSQ |
| VCMPUQ |
| VCNTMBB |
| VCNTMBD |
| VCNTMBH |
| VCNTMBW |
| VCTZDM |
| VDIVESD |
| VDIVESQ |
| VDIVESW |
| VDIVEUD |
| VDIVEUQ |
| VDIVEUW |
| VDIVSD |
| VDIVSQ |
| VDIVSW |
| VDIVUD |
| VDIVUQ |
| VDIVUW |
| VEXPANDBM |
| VEXPANDDM |
| VEXPANDHM |
| VEXPANDQM |
| VEXPANDWM |
| VEXTDDVLX |
| VEXTDDVRX |
| VEXTDUBVLX |
| VEXTDUBVRX |
| VEXTDUHVLX |
| VEXTDUHVRX |
| VEXTDUWVLX |
| VEXTDUWVRX |
| VEXTRACTBM |
| VEXTRACTDM |
| VEXTRACTHM |
| VEXTRACTQM |
| VEXTRACTWM |
| VEXTSD2Q |
| VGNB |
| VINSBLX |
| VINSBRX |
| VINSBVLX |
| VINSBVRX |
| VINSD |
| VINSDLX |
| VINSDRX |
| VINSHLX |
| VINSHRX |
| VINSHVLX |
| VINSHVRX |
| VINSW |
| VINSWLX |
| VINSWRX |
| VINSWVLX |
| VINSWVRX |
| VMODSD |
| VMODSQ |
| VMODSW |
| VMODUD |
| VMODUQ |
| VMODUW |
| VMSUMCUD |
| VMULESD |
| VMULEUD |
| VMULHSD |
| VMULHSW |
| VMULHUD |
| VMULHUW |
| VMULLD |
| VMULOSD |
| VMULOUD |
| VPDEPD |
| VPEXTD |
| VRLQ |
| VRLQMI |
| VRLQNM |
| VSLDBI |
| VSLQ |
| VSRAQ |
| VSRDBI |
| VSRQ |
| VSTRIBL |
| VSTRIBLCC |
| VSTRIBR |
| VSTRIBRCC |
| VSTRIHL |
| VSTRIHLCC |
| VSTRIHR |
| VSTRIHRCC |
| XSCMPEQQP |
| XSCMPGEQP |
| XSCMPGTQP |
| XSCVQPSQZ |
| XSCVQPUQZ |
| XSCVSQQP |
| XSCVUQQP |
| XSMAXCQP |
| XSMINCQP |
| XVBF16GER2 |
| XVBF16GER2NN |
| XVBF16GER2NP |
| XVBF16GER2PN |
| XVBF16GER2PP |
| XVCVBF16SPN |
| XVCVSPBF16 |
| XVF16GER2 |
| XVF16GER2NN |
| XVF16GER2NP |
| XVF16GER2PN |
| XVF16GER2PP |
| XVF32GER |
| XVF32GERNN |
| XVF32GERNP |
| XVF32GERPN |
| XVF32GERPP |
| XVF64GER |
| XVF64GERNN |
| XVF64GERNP |
| XVF64GERPN |
| XVF64GERPP |
| XVI16GER2 |
| XVI16GER2PP |
| XVI16GER2S |
| XVI16GER2SPP |
| XVI4GER8 |
| XVI4GER8PP |
| XVI8GER4 |
| XVI8GER4PP |
| XVI8GER4SPP |
| XVTLSBB |
| XXBLENDVB |
| XXBLENDVD |
| XXBLENDVH |
| XXBLENDVW |
| XXEVAL |
| XXGENPCVBM |
| XXGENPCVDM |
| XXGENPCVHM |
| XXGENPCVWM |
| XXMFACC |
| XXMTACC |
| XXPERMX |
| XXSETACCZ |
| XXSPLTI32DX |
| XXSPLTIDP |
| XXSPLTIW |
| MSGCLRU |
| MSGSNDU |
| URFID |
| ADDEX |
| MFFSCDRN |
| MFFSCDRNI |
| MFFSCE |
| MFFSCRN |
| MFFSCRNI |
| MFFSL |
| SLBIAG |
| VMSUMUDM |
| ADDPCIS |
| BCDCFNCC |
| BCDCFSQCC |
| BCDCFZCC |
| BCDCPSGNCC |
| BCDCTNCC |
| BCDCTSQCC |
| BCDCTZCC |
| BCDSCC |
| BCDSETSGNCC |
| BCDSRCC |
| BCDTRUNCCC |
| BCDUSCC |
| BCDUTRUNCCC |
| CMPEQB |
| CMPRB |
| CNTTZD |
| CNTTZDCC |
| CNTTZW |
| CNTTZWCC |
| COPY |
| CPABORT |
| DARN |
| DTSTSFI |
| DTSTSFIQ |
| EXTSWSLI |
| EXTSWSLICC |
| LDAT |
| LWAT |
| LXSD |
| LXSIBZX |
| LXSIHZX |
| LXSSP |
| LXV |
| LXVB16X |
| LXVH8X |
| LXVL |
| LXVLL |
| LXVWSX |
| LXVX |
| MADDHD |
| MADDHDU |
| MADDLD |
| MCRXRX |
| MFVSRLD |
| MODSD |
| MODSW |
| MODUD |
| MODUW |
| MSGSYNC |
| MTVSRDD |
| MTVSRWS |
| PASTECC |
| SETB |
| SLBIEG |
| SLBSYNC |
| STDAT |
| STOP |
| STWAT |
| STXSD |
| STXSIBX |
| STXSIHX |
| STXSSP |
| STXV |
| STXVB16X |
| STXVH8X |
| STXVL |
| STXVLL |
| STXVX |
| VABSDUB |
| VABSDUH |
| VABSDUW |
| VBPERMD |
| VCLZLSBB |
| VCMPNEB |
| VCMPNEBCC |
| VCMPNEH |
| VCMPNEHCC |
| VCMPNEW |
| VCMPNEWCC |
| VCMPNEZB |
| VCMPNEZBCC |
| VCMPNEZH |
| VCMPNEZHCC |
| VCMPNEZW |
| VCMPNEZWCC |
| VCTZB |
| VCTZD |
| VCTZH |
| VCTZLSBB |
| VCTZW |
| VEXTRACTD |
| VEXTRACTUB |
| VEXTRACTUH |
| VEXTRACTUW |
| VEXTSB2D |
| VEXTSB2W |
| VEXTSH2D |
| VEXTSH2W |
| VEXTSW2D |
| VEXTUBLX |
| VEXTUBRX |
| VEXTUHLX |
| VEXTUHRX |
| VEXTUWLX |
| VEXTUWRX |
| VINSERTB |
| VINSERTD |
| VINSERTH |
| VINSERTW |
| VMUL10CUQ |
| VMUL10ECUQ |
| VMUL10EUQ |
| VMUL10UQ |
| VNEGD |
| VNEGW |
| VPERMR |
| VPRTYBD |
| VPRTYBQ |
| VPRTYBW |
| VRLDMI |
| VRLDNM |
| VRLWMI |
| VRLWNM |
| VSLV |
| VSRV |
| WAIT |
| XSABSQP |
| XSADDQP |
| XSADDQPO |
| XSCMPEQDP |
| XSCMPEXPDP |
| XSCMPEXPQP |
| XSCMPGEDP |
| XSCMPGTDP |
| XSCMPOQP |
| XSCMPUQP |
| XSCPSGNQP |
| XSCVDPHP |
| XSCVDPQP |
| XSCVHPDP |
| XSCVQPDP |
| XSCVQPDPO |
| XSCVQPSDZ |
| XSCVQPSWZ |
| XSCVQPUDZ |
| XSCVQPUWZ |
| XSCVSDQP |
| XSCVUDQP |
| XSDIVQP |
| XSDIVQPO |
| XSIEXPDP |
| XSIEXPQP |
| XSMADDQP |
| XSMADDQPO |
| XSMAXCDP |
| XSMAXJDP |
| XSMINCDP |
| XSMINJDP |
| XSMSUBQP |
| XSMSUBQPO |
| XSMULQP |
| XSMULQPO |
| XSNABSQP |
| XSNEGQP |
| XSNMADDQP |
| XSNMADDQPO |
| XSNMSUBQP |
| XSNMSUBQPO |
| XSRQPI |
| XSRQPIX |
| XSRQPXP |
| XSSQRTQP |
| XSSQRTQPO |
| XSSUBQP |
| XSSUBQPO |
| XSTSTDCDP |
| XSTSTDCQP |
| XSTSTDCSP |
| XSXEXPDP |
| XSXEXPQP |
| XSXSIGDP |
| XSXSIGQP |
| XVCVHPSP |
| XVCVSPHP |
| XVIEXPDP |
| XVIEXPSP |
| XVTSTDCDP |
| XVTSTDCSP |
| XVXEXPDP |
| XVXEXPSP |
| XVXSIGDP |
| XVXSIGSP |
| XXBRD |
| XXBRH |
| XXBRQ |
| XXBRW |
| XXEXTRACTUW |
| XXINSERTW |
| XXPERM |
| XXPERMR |
| XXSPLTIB |
| BCDADDCC |
| BCDSUBCC |
| BCTAR |
| BCTARL |
| CLRBHRB |
| FMRGEW |
| FMRGOW |
| ICBT |
| LQARX |
| LXSIWAX |
| LXSIWZX |
| LXSSPX |
| MFBHRBE |
| MFVSRD |
| MFVSRWZ |
| MSGCLR |
| MSGCLRP |
| MSGSND |
| MSGSNDP |
| MTVSRD |
| MTVSRWA |
| MTVSRWZ |
| RFEBB |
| STQCXCC |
| STXSIWX |
| STXSSPX |
| VADDCUQ |
| VADDECUQ |
| VADDEUQM |
| VADDUDM |
| VADDUQM |
| VBPERMQ |
| VCIPHER |
| VCIPHERLAST |
| VCLZB |
| VCLZD |
| VCLZH |
| VCLZW |
| VCMPEQUD |
| VCMPEQUDCC |
| VCMPGTSD |
| VCMPGTSDCC |
| VCMPGTUD |
| VCMPGTUDCC |
| VEQV |
| VGBBD |
| VMAXSD |
| VMAXUD |
| VMINSD |
| VMINUD |
| VMRGEW |
| VMRGOW |
| VMULESW |
| VMULEUW |
| VMULOSW |
| VMULOUW |
| VMULUWM |
| VNAND |
| VNCIPHER |
| VNCIPHERLAST |
| VORC |
| VPERMXOR |
| VPKSDSS |
| VPKSDUS |
| VPKUDUM |
| VPKUDUS |
| VPMSUMB |
| VPMSUMD |
| VPMSUMH |
| VPMSUMW |
| VPOPCNTB |
| VPOPCNTD |
| VPOPCNTH |
| VPOPCNTW |
| VRLD |
| VSBOX |
| VSHASIGMAD |
| VSHASIGMAW |
| VSLD |
| VSRAD |
| VSRD |
| VSUBCUQ |
| VSUBECUQ |
| VSUBEUQM |
| VSUBUDM |
| VSUBUQM |
| VUPKHSW |
| VUPKLSW |
| XSADDSP |
| XSCVDPSPN |
| XSCVSPDPN |
| XSCVSXDSP |
| XSCVUXDSP |
| XSDIVSP |
| XSMADDASP |
| XSMADDMSP |
| XSMSUBASP |
| XSMSUBMSP |
| XSMULSP |
| XSNMADDASP |
| XSNMADDMSP |
| XSNMSUBASP |
| XSNMSUBMSP |
| XSRESP |
| XSRSP |
| XSRSQRTESP |
| XSSQRTSP |
| XSSUBSP |
| XXLEQV |
| XXLNAND |
| XXLORC |
| ADDG6S |
| BPERMD |
| CBCDTD |
| CDTBCD |
| DCFFIX |
| DCFFIXCC |
| DIVDE |
| DIVDECC |
| DIVDEO |
| DIVDEOCC |
| DIVDEU |
| DIVDEUCC |
| DIVDEUO |
| DIVDEUOCC |
| DIVWE |
| DIVWECC |
| DIVWEO |
| DIVWEOCC |
| DIVWEU |
| DIVWEUCC |
| DIVWEUO |
| DIVWEUOCC |
| FCFIDS |
| FCFIDSCC |
| FCFIDU |
| FCFIDUCC |
| FCFIDUS |
| FCFIDUSCC |
| FCTIDU |
| FCTIDUCC |
| FCTIDUZ |
| FCTIDUZCC |
| FCTIWU |
| FCTIWUCC |
| FCTIWUZ |
| FCTIWUZCC |
| FTDIV |
| FTSQRT |
| LBARX |
| LDBRX |
| LFIWZX |
| LHARX |
| LXSDX |
| LXVD2X |
| LXVDSX |
| LXVW4X |
| POPCNTD |
| POPCNTW |
| STBCXCC |
| STDBRX |
| STHCXCC |
| STXSDX |
| STXVD2X |
| STXVW4X |
| XSABSDP |
| XSADDDP |
| XSCMPODP |
| XSCMPUDP |
| XSCPSGNDP |
| XSCVDPSP |
| XSCVDPSXDS |
| XSCVDPSXWS |
| XSCVDPUXDS |
| XSCVDPUXWS |
| XSCVSPDP |
| XSCVSXDDP |
| XSCVUXDDP |
| XSDIVDP |
| XSMADDADP |
| XSMADDMDP |
| XSMAXDP |
| XSMINDP |
| XSMSUBADP |
| XSMSUBMDP |
| XSMULDP |
| XSNABSDP |
| XSNEGDP |
| XSNMADDADP |
| XSNMADDMDP |
| XSNMSUBADP |
| XSNMSUBMDP |
| XSRDPI |
| XSRDPIC |
| XSRDPIM |
| XSRDPIP |
| XSRDPIZ |
| XSREDP |
| XSRSQRTEDP |
| XSSQRTDP |
| XSSUBDP |
| XSTDIVDP |
| XSTSQRTDP |
| XVABSDP |
| XVABSSP |
| XVADDDP |
| XVADDSP |
| XVCMPEQDP |
| XVCMPEQDPCC |
| XVCMPEQSP |
| XVCMPEQSPCC |
| XVCMPGEDP |
| XVCMPGEDPCC |
| XVCMPGESP |
| XVCMPGESPCC |
| XVCMPGTDP |
| XVCMPGTDPCC |
| XVCMPGTSP |
| XVCMPGTSPCC |
| XVCPSGNDP |
| XVCPSGNSP |
| XVCVDPSP |
| XVCVDPSXDS |
| XVCVDPSXWS |
| XVCVDPUXDS |
| XVCVDPUXWS |
| XVCVSPDP |
| XVCVSPSXDS |
| XVCVSPSXWS |
| XVCVSPUXDS |
| XVCVSPUXWS |
| XVCVSXDDP |
| XVCVSXDSP |
| XVCVSXWDP |
| XVCVSXWSP |
| XVCVUXDDP |
| XVCVUXDSP |
| XVCVUXWDP |
| XVCVUXWSP |
| XVDIVDP |
| XVDIVSP |
| XVMADDADP |
| XVMADDASP |
| XVMADDMDP |
| XVMADDMSP |
| XVMAXDP |
| XVMAXSP |
| XVMINDP |
| XVMINSP |
| XVMSUBADP |
| XVMSUBASP |
| XVMSUBMDP |
| XVMSUBMSP |
| XVMULDP |
| XVMULSP |
| XVNABSDP |
| XVNABSSP |
| XVNEGDP |
| XVNEGSP |
| XVNMADDADP |
| XVNMADDASP |
| XVNMADDMDP |
| XVNMADDMSP |
| XVNMSUBADP |
| XVNMSUBASP |
| XVNMSUBMDP |
| XVNMSUBMSP |
| XVRDPI |
| XVRDPIC |
| XVRDPIM |
| XVRDPIP |
| XVRDPIZ |
| XVREDP |
| XVRESP |
| XVRSPI |
| XVRSPIC |
| XVRSPIM |
| XVRSPIP |
| XVRSPIZ |
| XVRSQRTEDP |
| XVRSQRTESP |
| XVSQRTDP |
| XVSQRTSP |
| XVSUBDP |
| XVSUBSP |
| XVTDIVDP |
| XVTDIVSP |
| XVTSQRTDP |
| XVTSQRTSP |
| XXLAND |
| XXLANDC |
| XXLNOR |
| XXLOR |
| XXLXOR |
| XXMRGHW |
| XXMRGLW |
| XXPERMDI |
| XXSEL |
| XXSLDWI |
| XXSPLTW |
| CMPB |
| DADD |
| DADDCC |
| DADDQ |
| DADDQCC |
| DCFFIXQ |
| DCFFIXQCC |
| DCMPO |
| DCMPOQ |
| DCMPU |
| DCMPUQ |
| DCTDP |
| DCTDPCC |
| DCTFIX |
| DCTFIXCC |
| DCTFIXQ |
| DCTFIXQCC |
| DCTQPQ |
| DCTQPQCC |
| DDEDPD |
| DDEDPDCC |
| DDEDPDQ |
| DDEDPDQCC |
| DDIV |
| DDIVCC |
| DDIVQ |
| DDIVQCC |
| DENBCD |
| DENBCDCC |
| DENBCDQ |
| DENBCDQCC |
| DIEX |
| DIEXCC |
| DIEXQCC |
| DIEXQ |
| DMUL |
| DMULCC |
| DMULQ |
| DMULQCC |
| DQUA |
| DQUACC |
| DQUAI |
| DQUAICC |
| DQUAIQ |
| DQUAIQCC |
| DQUAQ |
| DQUAQCC |
| DRDPQ |
| DRDPQCC |
| DRINTN |
| DRINTNCC |
| DRINTNQ |
| DRINTNQCC |
| DRINTX |
| DRINTXCC |
| DRINTXQ |
| DRINTXQCC |
| DRRND |
| DRRNDCC |
| DRRNDQ |
| DRRNDQCC |
| DRSP |
| DRSPCC |
| DSCLI |
| DSCLICC |
| DSCLIQ |
| DSCLIQCC |
| DSCRI |
| DSCRICC |
| DSCRIQ |
| DSCRIQCC |
| DSUB |
| DSUBCC |
| DSUBQ |
| DSUBQCC |
| DTSTDC |
| DTSTDCQ |
| DTSTDG |
| DTSTDGQ |
| DTSTEX |
| DTSTEXQ |
| DTSTSF |
| DTSTSFQ |
| DXEX |
| DXEXCC |
| DXEXQ |
| DXEXQCC |
| FCPSGN |
| FCPSGNCC |
| LBZCIX |
| LDCIX |
| LFDP |
| LFDPX |
| LFIWAX |
| LHZCIX |
| LWZCIX |
| PRTYD |
| PRTYW |
| SLBFEECC |
| STBCIX |
| STDCIX |
| STFDP |
| STFDPX |
| STHCIX |
| STWCIX |
| ISEL |
| LVEBX |
| LVEHX |
| LVEWX |
| LVSL |
| LVSR |
| LVX |
| LVXL |
| MFVSCR |
| MTVSCR |
| STVEBX |
| STVEHX |
| STVEWX |
| STVX |
| STVXL |
| TLBIEL |
| VADDCUW |
| VADDFP |
| VADDSBS |
| VADDSHS |
| VADDSWS |
| VADDUBM |
| VADDUBS |
| VADDUHM |
| VADDUHS |
| VADDUWM |
| VADDUWS |
| VAND |
| VANDC |
| VAVGSB |
| VAVGSH |
| VAVGSW |
| VAVGUB |
| VAVGUH |
| VAVGUW |
| VCFSX |
| VCFUX |
| VCMPBFP |
| VCMPBFPCC |
| VCMPEQFP |
| VCMPEQFPCC |
| VCMPEQUB |
| VCMPEQUBCC |
| VCMPEQUH |
| VCMPEQUHCC |
| VCMPEQUW |
| VCMPEQUWCC |
| VCMPGEFP |
| VCMPGEFPCC |
| VCMPGTFP |
| VCMPGTFPCC |
| VCMPGTSB |
| VCMPGTSBCC |
| VCMPGTSH |
| VCMPGTSHCC |
| VCMPGTSW |
| VCMPGTSWCC |
| VCMPGTUB |
| VCMPGTUBCC |
| VCMPGTUH |
| VCMPGTUHCC |
| VCMPGTUW |
| VCMPGTUWCC |
| VCTSXS |
| VCTUXS |
| VEXPTEFP |
| VLOGEFP |
| VMADDFP |
| VMAXFP |
| VMAXSB |
| VMAXSH |
| VMAXSW |
| VMAXUB |
| VMAXUH |
| VMAXUW |
| VMHADDSHS |
| VMHRADDSHS |
| VMINFP |
| VMINSB |
| VMINSH |
| VMINSW |
| VMINUB |
| VMINUH |
| VMINUW |
| VMLADDUHM |
| VMRGHB |
| VMRGHH |
| VMRGHW |
| VMRGLB |
| VMRGLH |
| VMRGLW |
| VMSUMMBM |
| VMSUMSHM |
| VMSUMSHS |
| VMSUMUBM |
| VMSUMUHM |
| VMSUMUHS |
| VMULESB |
| VMULESH |
| VMULEUB |
| VMULEUH |
| VMULOSB |
| VMULOSH |
| VMULOUB |
| VMULOUH |
| VNMSUBFP |
| VNOR |
| VOR |
| VPERM |
| VPKPX |
| VPKSHSS |
| VPKSHUS |
| VPKSWSS |
| VPKSWUS |
| VPKUHUM |
| VPKUHUS |
| VPKUWUM |
| VPKUWUS |
| VREFP |
| VRFIM |
| VRFIN |
| VRFIP |
| VRFIZ |
| VRLB |
| VRLH |
| VRLW |
| VRSQRTEFP |
| VSEL |
| VSL |
| VSLB |
| VSLDOI |
| VSLH |
| VSLO |
| VSLW |
| VSPLTB |
| VSPLTH |
| VSPLTISB |
| VSPLTISH |
| VSPLTISW |
| VSPLTW |
| VSR |
| VSRAB |
| VSRAH |
| VSRAW |
| VSRB |
| VSRH |
| VSRO |
| VSRW |
| VSUBCUW |
| VSUBFP |
| VSUBSBS |
| VSUBSHS |
| VSUBSWS |
| VSUBUBM |
| VSUBUBS |
| VSUBUHM |
| VSUBUHS |
| VSUBUWM |
| VSUBUWS |
| VSUM2SWS |
| VSUM4SBS |
| VSUM4SHS |
| VSUM4UBS |
| VSUMSWS |
| VUPKHPX |
| VUPKHSB |
| VUPKHSH |
| VUPKLPX |
| VUPKLSB |
| VUPKLSH |
| VXOR |
| FRE |
| FRECC |
| FRIM |
| FRIMCC |
| FRIN |
| FRINCC |
| FRIP |
| FRIPCC |
| FRIZ |
| FRIZCC |
| FRSQRTES |
| FRSQRTESCC |
| HRFID |
| POPCNTB |
| MFOCRF |
| MTOCRF |
| SLBMFEE |
| SLBMFEV |
| SLBMTE |
| RFSCV |
| SCV |
| LQ |
| STQ |
| CNTLZD |
| CNTLZDCC |
| DCBF |
| DCBST |
| DCBT |
| DCBTST |
| DIVD |
| DIVDCC |
| DIVDO |
| DIVDOCC |
| DIVDU |
| DIVDUCC |
| DIVDUO |
| DIVDUOCC |
| DIVW |
| DIVWCC |
| DIVWO |
| DIVWOCC |
| DIVWU |
| DIVWUCC |
| DIVWUO |
| DIVWUOCC |
| EIEIO |
| EXTSB |
| EXTSBCC |
| EXTSW |
| EXTSWCC |
| FADDS |
| FADDSCC |
| FCFID |
| FCFIDCC |
| FCTID |
| FCTIDCC |
| FCTIDZ |
| FCTIDZCC |
| FDIVS |
| FDIVSCC |
| FMADDS |
| FMADDSCC |
| FMSUBS |
| FMSUBSCC |
| FMULS |
| FMULSCC |
| FNMADDS |
| FNMADDSCC |
| FNMSUBS |
| FNMSUBSCC |
| FRES |
| FRESCC |
| FRSQRTE |
| FRSQRTECC |
| FSEL |
| FSELCC |
| FSQRTS |
| FSQRTSCC |
| FSUBS |
| FSUBSCC |
| ICBI |
| LD |
| LDARX |
| LDU |
| LDUX |
| LDX |
| LWA |
| LWARX |
| LWAUX |
| LWAX |
| MFTB |
| MTMSRD |
| MULHD |
| MULHDCC |
| MULHDU |
| MULHDUCC |
| MULHW |
| MULHWCC |
| MULHWU |
| MULHWUCC |
| MULLD |
| MULLDCC |
| MULLDO |
| MULLDOCC |
| RFID |
| RLDCL |
| RLDCLCC |
| RLDCR |
| RLDCRCC |
| RLDIC |
| RLDICCC |
| RLDICL |
| RLDICLCC |
| RLDICR |
| RLDICRCC |
| RLDIMI |
| RLDIMICC |
| SC |
| SLBIA |
| SLBIE |
| SLD |
| SLDCC |
| SRAD |
| SRADCC |
| SRADI |
| SRADICC |
| SRD |
| SRDCC |
| STD |
| STDCXCC |
| STDU |
| STDUX |
| STDX |
| STFIWX |
| STWCXCC |
| SUBF |
| SUBFCC |
| SUBFO |
| SUBFOCC |
| TD |
| TDI |
| TLBSYNC |
| FCTIW |
| FCTIWCC |
| FCTIWZ |
| FCTIWZCC |
| FSQRT |
| FSQRTCC |
| ADD |
| ADDCC |
| ADDO |
| ADDOCC |
| ADDC |
| ADDCCC |
| ADDCO |
| ADDCOCC |
| ADDE |
| ADDECC |
| ADDEO |
| ADDEOCC |
| LI |
| ADDI |
| ADDIC |
| ADDICCC |
| LIS |
| ADDIS |
| ADDME |
| ADDMECC |
| ADDMEO |
| ADDMEOCC |
| ADDZE |
| ADDZECC |
| ADDZEO |
| ADDZEOCC |
| AND |
| ANDCC |
| ANDC |
| ANDCCC |
| ANDICC |
| ANDISCC |
| B |
| BA |
| BL |
| BLA |
| BC |
| BCA |
| BCL |
| BCLA |
| BCCTR |
| BCCTRL |
| BCLR |
| BCLRL |
| CMPW |
| CMPD |
| CMP |
| CMPWI |
| CMPDI |
| CMPI |
| CMPLW |
| CMPLD |
| CMPL |
| CMPLWI |
| CMPLDI |
| CMPLI |
| CNTLZW |
| CNTLZWCC |
| CRAND |
| CRANDC |
| CREQV |
| CRNAND |
| CRNOR |
| CROR |
| CRORC |
| CRXOR |
| DCBZ |
| EQV |
| EQVCC |
| EXTSH |
| EXTSHCC |
| FABS |
| FABSCC |
| FADD |
| FADDCC |
| FCMPO |
| FCMPU |
| FDIV |
| FDIVCC |
| FMADD |
| FMADDCC |
| FMR |
| FMRCC |
| FMSUB |
| FMSUBCC |
| FMUL |
| FMULCC |
| FNABS |
| FNABSCC |
| FNEG |
| FNEGCC |
| FNMADD |
| FNMADDCC |
| FNMSUB |
| FNMSUBCC |
| FRSP |
| FRSPCC |
| FSUB |
| FSUBCC |
| ISYNC |
| LBZ |
| LBZU |
| LBZUX |
| LBZX |
| LFD |
| LFDU |
| LFDUX |
| LFDX |
| LFS |
| LFSU |
| LFSUX |
| LFSX |
| LHA |
| LHAU |
| LHAUX |
| LHAX |
| LHBRX |
| LHZ |
| LHZU |
| LHZUX |
| LHZX |
| LMW |
| LSWI |
| LSWX |
| LWBRX |
| LWZ |
| LWZU |
| LWZUX |
| LWZX |
| MCRF |
| MCRFS |
| MFCR |
| MFFS |
| MFFSCC |
| MFMSR |
| MFSPR |
| MTCRF |
| MTFSB0 |
| MTFSB0CC |
| MTFSB1 |
| MTFSB1CC |
| MTFSF |
| MTFSFCC |
| MTFSFI |
| MTFSFICC |
| MTMSR |
| MTSPR |
| MULLI |
| MULLW |
| MULLWCC |
| MULLWO |
| MULLWOCC |
| NAND |
| NANDCC |
| NEG |
| NEGCC |
| NEGO |
| NEGOCC |
| NOR |
| NORCC |
| OR |
| ORCC |
| ORC |
| ORCCC |
| NOP |
| ORI |
| ORIS |
| RLWIMI |
| RLWIMICC |
| RLWINM |
| RLWINMCC |
| RLWNM |
| RLWNMCC |
| SLW |
| SLWCC |
| SRAW |
| SRAWCC |
| SRAWI |
| SRAWICC |
| SRW |
| SRWCC |
| STB |
| STBU |
| STBUX |
| STBX |
| STFD |
| STFDU |
| STFDUX |
| STFDX |
| STFS |
| STFSU |
| STFSUX |
| STFSX |
| STH |
| STHBRX |
| STHU |
| STHUX |
| STHX |
| STMW |
| STSWI |
| STSWX |
| STW |
| STWBRX |
| STWU |
| STWUX |
| STWX |
| SUBFC |
| SUBFCCC |
| SUBFCO |
| SUBFCOCC |
| SUBFE |
| SUBFECC |
| SUBFEO |
| SUBFEOCC |
| SUBFIC |
| SUBFME |
| SUBFMECC |
| SUBFMEO |
| SUBFMEOCC |
| SUBFZE |
| SUBFZECC |
| SUBFZEO |
| SUBFZEOCC |
| SYNC |
| TLBIE |
| TW |
| TWI |
| XOR |
| XORCC |
| XORI |
| XORIS |
| ) |
| |
// opstr maps each Op to its lowercase assembler mnemonic. CC-suffixed
// opcodes map to the dotted (Rc=1) spelling, e.g. ADDCC -> "add.".
var opstr = [...]string{
| BRD: "brd", |
| BRH: "brh", |
| BRW: "brw", |
| CFUGED: "cfuged", |
| CNTLZDM: "cntlzdm", |
| CNTTZDM: "cnttzdm", |
| DCFFIXQQ: "dcffixqq", |
| DCTFIXQQ: "dctfixqq", |
| LXVKQ: "lxvkq", |
| LXVP: "lxvp", |
| LXVPX: "lxvpx", |
| LXVRBX: "lxvrbx", |
| LXVRDX: "lxvrdx", |
| LXVRHX: "lxvrhx", |
| LXVRWX: "lxvrwx", |
| MTVSRBM: "mtvsrbm", |
| MTVSRBMI: "mtvsrbmi", |
| MTVSRDM: "mtvsrdm", |
| MTVSRHM: "mtvsrhm", |
| MTVSRQM: "mtvsrqm", |
| MTVSRWM: "mtvsrwm", |
| PADDI: "paddi", |
| PDEPD: "pdepd", |
| PEXTD: "pextd", |
| PLBZ: "plbz", |
| PLD: "pld", |
| PLFD: "plfd", |
| PLFS: "plfs", |
| PLHA: "plha", |
| PLHZ: "plhz", |
| PLQ: "plq", |
| PLWA: "plwa", |
| PLWZ: "plwz", |
| PLXSD: "plxsd", |
| PLXSSP: "plxssp", |
| PLXV: "plxv", |
| PLXVP: "plxvp", |
| PMXVBF16GER2: "pmxvbf16ger2", |
| PMXVBF16GER2NN: "pmxvbf16ger2nn", |
| PMXVBF16GER2NP: "pmxvbf16ger2np", |
| PMXVBF16GER2PN: "pmxvbf16ger2pn", |
| PMXVBF16GER2PP: "pmxvbf16ger2pp", |
| PMXVF16GER2: "pmxvf16ger2", |
| PMXVF16GER2NN: "pmxvf16ger2nn", |
| PMXVF16GER2NP: "pmxvf16ger2np", |
| PMXVF16GER2PN: "pmxvf16ger2pn", |
| PMXVF16GER2PP: "pmxvf16ger2pp", |
| PMXVF32GER: "pmxvf32ger", |
| PMXVF32GERNN: "pmxvf32gernn", |
| PMXVF32GERNP: "pmxvf32gernp", |
| PMXVF32GERPN: "pmxvf32gerpn", |
| PMXVF32GERPP: "pmxvf32gerpp", |
| PMXVF64GER: "pmxvf64ger", |
| PMXVF64GERNN: "pmxvf64gernn", |
| PMXVF64GERNP: "pmxvf64gernp", |
| PMXVF64GERPN: "pmxvf64gerpn", |
| PMXVF64GERPP: "pmxvf64gerpp", |
| PMXVI16GER2: "pmxvi16ger2", |
| PMXVI16GER2PP: "pmxvi16ger2pp", |
| PMXVI16GER2S: "pmxvi16ger2s", |
| PMXVI16GER2SPP: "pmxvi16ger2spp", |
| PMXVI4GER8: "pmxvi4ger8", |
| PMXVI4GER8PP: "pmxvi4ger8pp", |
| PMXVI8GER4: "pmxvi8ger4", |
| PMXVI8GER4PP: "pmxvi8ger4pp", |
| PMXVI8GER4SPP: "pmxvi8ger4spp", |
| PNOP: "pnop", |
| PSTB: "pstb", |
| PSTD: "pstd", |
| PSTFD: "pstfd", |
| PSTFS: "pstfs", |
| PSTH: "psth", |
| PSTQ: "pstq", |
| PSTW: "pstw", |
| PSTXSD: "pstxsd", |
| PSTXSSP: "pstxssp", |
| PSTXV: "pstxv", |
| PSTXVP: "pstxvp", |
| SETBC: "setbc", |
| SETBCR: "setbcr", |
| SETNBC: "setnbc", |
| SETNBCR: "setnbcr", |
| STXVP: "stxvp", |
| STXVPX: "stxvpx", |
| STXVRBX: "stxvrbx", |
| STXVRDX: "stxvrdx", |
| STXVRHX: "stxvrhx", |
| STXVRWX: "stxvrwx", |
| VCFUGED: "vcfuged", |
| VCLRLB: "vclrlb", |
| VCLRRB: "vclrrb", |
| VCLZDM: "vclzdm", |
| VCMPEQUQ: "vcmpequq", |
| VCMPEQUQCC: "vcmpequq.", |
| VCMPGTSQ: "vcmpgtsq", |
| VCMPGTSQCC: "vcmpgtsq.", |
| VCMPGTUQ: "vcmpgtuq", |
| VCMPGTUQCC: "vcmpgtuq.", |
| VCMPSQ: "vcmpsq", |
| VCMPUQ: "vcmpuq", |
| VCNTMBB: "vcntmbb", |
| VCNTMBD: "vcntmbd", |
| VCNTMBH: "vcntmbh", |
| VCNTMBW: "vcntmbw", |
| VCTZDM: "vctzdm", |
| VDIVESD: "vdivesd", |
| VDIVESQ: "vdivesq", |
| VDIVESW: "vdivesw", |
| VDIVEUD: "vdiveud", |
| VDIVEUQ: "vdiveuq", |
| VDIVEUW: "vdiveuw", |
| VDIVSD: "vdivsd", |
| VDIVSQ: "vdivsq", |
| VDIVSW: "vdivsw", |
| VDIVUD: "vdivud", |
| VDIVUQ: "vdivuq", |
| VDIVUW: "vdivuw", |
| VEXPANDBM: "vexpandbm", |
| VEXPANDDM: "vexpanddm", |
| VEXPANDHM: "vexpandhm", |
| VEXPANDQM: "vexpandqm", |
| VEXPANDWM: "vexpandwm", |
| VEXTDDVLX: "vextddvlx", |
| VEXTDDVRX: "vextddvrx", |
| VEXTDUBVLX: "vextdubvlx", |
| VEXTDUBVRX: "vextdubvrx", |
| VEXTDUHVLX: "vextduhvlx", |
| VEXTDUHVRX: "vextduhvrx", |
| VEXTDUWVLX: "vextduwvlx", |
| VEXTDUWVRX: "vextduwvrx", |
| VEXTRACTBM: "vextractbm", |
| VEXTRACTDM: "vextractdm", |
| VEXTRACTHM: "vextracthm", |
| VEXTRACTQM: "vextractqm", |
| VEXTRACTWM: "vextractwm", |
| VEXTSD2Q: "vextsd2q", |
| VGNB: "vgnb", |
| VINSBLX: "vinsblx", |
| VINSBRX: "vinsbrx", |
| VINSBVLX: "vinsbvlx", |
| VINSBVRX: "vinsbvrx", |
| VINSD: "vinsd", |
| VINSDLX: "vinsdlx", |
| VINSDRX: "vinsdrx", |
| VINSHLX: "vinshlx", |
| VINSHRX: "vinshrx", |
| VINSHVLX: "vinshvlx", |
| VINSHVRX: "vinshvrx", |
| VINSW: "vinsw", |
| VINSWLX: "vinswlx", |
| VINSWRX: "vinswrx", |
| VINSWVLX: "vinswvlx", |
| VINSWVRX: "vinswvrx", |
| VMODSD: "vmodsd", |
| VMODSQ: "vmodsq", |
| VMODSW: "vmodsw", |
| VMODUD: "vmodud", |
| VMODUQ: "vmoduq", |
| VMODUW: "vmoduw", |
| VMSUMCUD: "vmsumcud", |
| VMULESD: "vmulesd", |
| VMULEUD: "vmuleud", |
| VMULHSD: "vmulhsd", |
| VMULHSW: "vmulhsw", |
| VMULHUD: "vmulhud", |
| VMULHUW: "vmulhuw", |
| VMULLD: "vmulld", |
| VMULOSD: "vmulosd", |
| VMULOUD: "vmuloud", |
| VPDEPD: "vpdepd", |
| VPEXTD: "vpextd", |
| VRLQ: "vrlq", |
| VRLQMI: "vrlqmi", |
| VRLQNM: "vrlqnm", |
| VSLDBI: "vsldbi", |
| VSLQ: "vslq", |
| VSRAQ: "vsraq", |
| VSRDBI: "vsrdbi", |
| VSRQ: "vsrq", |
| VSTRIBL: "vstribl", |
| VSTRIBLCC: "vstribl.", |
| VSTRIBR: "vstribr", |
| VSTRIBRCC: "vstribr.", |
| VSTRIHL: "vstrihl", |
| VSTRIHLCC: "vstrihl.", |
| VSTRIHR: "vstrihr", |
| VSTRIHRCC: "vstrihr.", |
| XSCMPEQQP: "xscmpeqqp", |
| XSCMPGEQP: "xscmpgeqp", |
| XSCMPGTQP: "xscmpgtqp", |
| XSCVQPSQZ: "xscvqpsqz", |
| XSCVQPUQZ: "xscvqpuqz", |
| XSCVSQQP: "xscvsqqp", |
| XSCVUQQP: "xscvuqqp", |
| XSMAXCQP: "xsmaxcqp", |
| XSMINCQP: "xsmincqp", |
| XVBF16GER2: "xvbf16ger2", |
| XVBF16GER2NN: "xvbf16ger2nn", |
| XVBF16GER2NP: "xvbf16ger2np", |
| XVBF16GER2PN: "xvbf16ger2pn", |
| XVBF16GER2PP: "xvbf16ger2pp", |
| XVCVBF16SPN: "xvcvbf16spn", |
| XVCVSPBF16: "xvcvspbf16", |
| XVF16GER2: "xvf16ger2", |
| XVF16GER2NN: "xvf16ger2nn", |
| XVF16GER2NP: "xvf16ger2np", |
| XVF16GER2PN: "xvf16ger2pn", |
| XVF16GER2PP: "xvf16ger2pp", |
| XVF32GER: "xvf32ger", |
| XVF32GERNN: "xvf32gernn", |
| XVF32GERNP: "xvf32gernp", |
| XVF32GERPN: "xvf32gerpn", |
| XVF32GERPP: "xvf32gerpp", |
| XVF64GER: "xvf64ger", |
| XVF64GERNN: "xvf64gernn", |
| XVF64GERNP: "xvf64gernp", |
| XVF64GERPN: "xvf64gerpn", |
| XVF64GERPP: "xvf64gerpp", |
| XVI16GER2: "xvi16ger2", |
| XVI16GER2PP: "xvi16ger2pp", |
| XVI16GER2S: "xvi16ger2s", |
| XVI16GER2SPP: "xvi16ger2spp", |
| XVI4GER8: "xvi4ger8", |
| XVI4GER8PP: "xvi4ger8pp", |
| XVI8GER4: "xvi8ger4", |
| XVI8GER4PP: "xvi8ger4pp", |
| XVI8GER4SPP: "xvi8ger4spp", |
| XVTLSBB: "xvtlsbb", |
| XXBLENDVB: "xxblendvb", |
| XXBLENDVD: "xxblendvd", |
| XXBLENDVH: "xxblendvh", |
| XXBLENDVW: "xxblendvw", |
| XXEVAL: "xxeval", |
| XXGENPCVBM: "xxgenpcvbm", |
| XXGENPCVDM: "xxgenpcvdm", |
| XXGENPCVHM: "xxgenpcvhm", |
| XXGENPCVWM: "xxgenpcvwm", |
| XXMFACC: "xxmfacc", |
| XXMTACC: "xxmtacc", |
| XXPERMX: "xxpermx", |
| XXSETACCZ: "xxsetaccz", |
| XXSPLTI32DX: "xxsplti32dx", |
| XXSPLTIDP: "xxspltidp", |
| XXSPLTIW: "xxspltiw", |
| MSGCLRU: "msgclru", |
| MSGSNDU: "msgsndu", |
| URFID: "urfid", |
| ADDEX: "addex", |
| MFFSCDRN: "mffscdrn", |
| MFFSCDRNI: "mffscdrni", |
| MFFSCE: "mffsce", |
| MFFSCRN: "mffscrn", |
| MFFSCRNI: "mffscrni", |
| MFFSL: "mffsl", |
| SLBIAG: "slbiag", |
| VMSUMUDM: "vmsumudm", |
| ADDPCIS: "addpcis", |
| BCDCFNCC: "bcdcfn.", |
| BCDCFSQCC: "bcdcfsq.", |
| BCDCFZCC: "bcdcfz.", |
| BCDCPSGNCC: "bcdcpsgn.", |
| BCDCTNCC: "bcdctn.", |
| BCDCTSQCC: "bcdctsq.", |
| BCDCTZCC: "bcdctz.", |
| BCDSCC: "bcds.", |
| BCDSETSGNCC: "bcdsetsgn.", |
| BCDSRCC: "bcdsr.", |
| BCDTRUNCCC: "bcdtrunc.", |
| BCDUSCC: "bcdus.", |
| BCDUTRUNCCC: "bcdutrunc.", |
| CMPEQB: "cmpeqb", |
| CMPRB: "cmprb", |
| CNTTZD: "cnttzd", |
| CNTTZDCC: "cnttzd.", |
| CNTTZW: "cnttzw", |
| CNTTZWCC: "cnttzw.", |
| COPY: "copy", |
| CPABORT: "cpabort", |
| DARN: "darn", |
| DTSTSFI: "dtstsfi", |
| DTSTSFIQ: "dtstsfiq", |
| EXTSWSLI: "extswsli", |
| EXTSWSLICC: "extswsli.", |
| LDAT: "ldat", |
| LWAT: "lwat", |
| LXSD: "lxsd", |
| LXSIBZX: "lxsibzx", |
| LXSIHZX: "lxsihzx", |
| LXSSP: "lxssp", |
| LXV: "lxv", |
| LXVB16X: "lxvb16x", |
| LXVH8X: "lxvh8x", |
| LXVL: "lxvl", |
| LXVLL: "lxvll", |
| LXVWSX: "lxvwsx", |
| LXVX: "lxvx", |
| MADDHD: "maddhd", |
| MADDHDU: "maddhdu", |
| MADDLD: "maddld", |
| MCRXRX: "mcrxrx", |
| MFVSRLD: "mfvsrld", |
| MODSD: "modsd", |
| MODSW: "modsw", |
| MODUD: "modud", |
| MODUW: "moduw", |
| MSGSYNC: "msgsync", |
| MTVSRDD: "mtvsrdd", |
| MTVSRWS: "mtvsrws", |
| PASTECC: "paste.", |
| SETB: "setb", |
| SLBIEG: "slbieg", |
| SLBSYNC: "slbsync", |
| STDAT: "stdat", |
| STOP: "stop", |
| STWAT: "stwat", |
| STXSD: "stxsd", |
| STXSIBX: "stxsibx", |
| STXSIHX: "stxsihx", |
| STXSSP: "stxssp", |
| STXV: "stxv", |
| STXVB16X: "stxvb16x", |
| STXVH8X: "stxvh8x", |
| STXVL: "stxvl", |
| STXVLL: "stxvll", |
| STXVX: "stxvx", |
| VABSDUB: "vabsdub", |
| VABSDUH: "vabsduh", |
| VABSDUW: "vabsduw", |
| VBPERMD: "vbpermd", |
| VCLZLSBB: "vclzlsbb", |
| VCMPNEB: "vcmpneb", |
| VCMPNEBCC: "vcmpneb.", |
| VCMPNEH: "vcmpneh", |
| VCMPNEHCC: "vcmpneh.", |
| VCMPNEW: "vcmpnew", |
| VCMPNEWCC: "vcmpnew.", |
| VCMPNEZB: "vcmpnezb", |
| VCMPNEZBCC: "vcmpnezb.", |
| VCMPNEZH: "vcmpnezh", |
| VCMPNEZHCC: "vcmpnezh.", |
| VCMPNEZW: "vcmpnezw", |
| VCMPNEZWCC: "vcmpnezw.", |
| VCTZB: "vctzb", |
| VCTZD: "vctzd", |
| VCTZH: "vctzh", |
| VCTZLSBB: "vctzlsbb", |
| VCTZW: "vctzw", |
| VEXTRACTD: "vextractd", |
| VEXTRACTUB: "vextractub", |
| VEXTRACTUH: "vextractuh", |
| VEXTRACTUW: "vextractuw", |
| VEXTSB2D: "vextsb2d", |
| VEXTSB2W: "vextsb2w", |
| VEXTSH2D: "vextsh2d", |
| VEXTSH2W: "vextsh2w", |
| VEXTSW2D: "vextsw2d", |
| VEXTUBLX: "vextublx", |
| VEXTUBRX: "vextubrx", |
| VEXTUHLX: "vextuhlx", |
| VEXTUHRX: "vextuhrx", |
| VEXTUWLX: "vextuwlx", |
| VEXTUWRX: "vextuwrx", |
| VINSERTB: "vinsertb", |
| VINSERTD: "vinsertd", |
| VINSERTH: "vinserth", |
| VINSERTW: "vinsertw", |
| VMUL10CUQ: "vmul10cuq", |
| VMUL10ECUQ: "vmul10ecuq", |
| VMUL10EUQ: "vmul10euq", |
| VMUL10UQ: "vmul10uq", |
| VNEGD: "vnegd", |
| VNEGW: "vnegw", |
| VPERMR: "vpermr", |
| VPRTYBD: "vprtybd", |
| VPRTYBQ: "vprtybq", |
| VPRTYBW: "vprtybw", |
| VRLDMI: "vrldmi", |
| VRLDNM: "vrldnm", |
| VRLWMI: "vrlwmi", |
| VRLWNM: "vrlwnm", |
| VSLV: "vslv", |
| VSRV: "vsrv", |
| WAIT: "wait", |
| XSABSQP: "xsabsqp", |
| XSADDQP: "xsaddqp", |
| XSADDQPO: "xsaddqpo", |
| XSCMPEQDP: "xscmpeqdp", |
| XSCMPEXPDP: "xscmpexpdp", |
| XSCMPEXPQP: "xscmpexpqp", |
| XSCMPGEDP: "xscmpgedp", |
| XSCMPGTDP: "xscmpgtdp", |
| XSCMPOQP: "xscmpoqp", |
| XSCMPUQP: "xscmpuqp", |
| XSCPSGNQP: "xscpsgnqp", |
| XSCVDPHP: "xscvdphp", |
| XSCVDPQP: "xscvdpqp", |
| XSCVHPDP: "xscvhpdp", |
| XSCVQPDP: "xscvqpdp", |
| XSCVQPDPO: "xscvqpdpo", |
| XSCVQPSDZ: "xscvqpsdz", |
| XSCVQPSWZ: "xscvqpswz", |
| XSCVQPUDZ: "xscvqpudz", |
| XSCVQPUWZ: "xscvqpuwz", |
| XSCVSDQP: "xscvsdqp", |
| XSCVUDQP: "xscvudqp", |
| XSDIVQP: "xsdivqp", |
| XSDIVQPO: "xsdivqpo", |
| XSIEXPDP: "xsiexpdp", |
| XSIEXPQP: "xsiexpqp", |
| XSMADDQP: "xsmaddqp", |
| XSMADDQPO: "xsmaddqpo", |
| XSMAXCDP: "xsmaxcdp", |
| XSMAXJDP: "xsmaxjdp", |
| XSMINCDP: "xsmincdp", |
| XSMINJDP: "xsminjdp", |
| XSMSUBQP: "xsmsubqp", |
| XSMSUBQPO: "xsmsubqpo", |
| XSMULQP: "xsmulqp", |
| XSMULQPO: "xsmulqpo", |
| XSNABSQP: "xsnabsqp", |
| XSNEGQP: "xsnegqp", |
| XSNMADDQP: "xsnmaddqp", |
| XSNMADDQPO: "xsnmaddqpo", |
| XSNMSUBQP: "xsnmsubqp", |
| XSNMSUBQPO: "xsnmsubqpo", |
| XSRQPI: "xsrqpi", |
| XSRQPIX: "xsrqpix", |
| XSRQPXP: "xsrqpxp", |
| XSSQRTQP: "xssqrtqp", |
| XSSQRTQPO: "xssqrtqpo", |
| XSSUBQP: "xssubqp", |
| XSSUBQPO: "xssubqpo", |
| XSTSTDCDP: "xststdcdp", |
| XSTSTDCQP: "xststdcqp", |
| XSTSTDCSP: "xststdcsp", |
| XSXEXPDP: "xsxexpdp", |
| XSXEXPQP: "xsxexpqp", |
| XSXSIGDP: "xsxsigdp", |
| XSXSIGQP: "xsxsigqp", |
| XVCVHPSP: "xvcvhpsp", |
| XVCVSPHP: "xvcvsphp", |
| XVIEXPDP: "xviexpdp", |
| XVIEXPSP: "xviexpsp", |
| XVTSTDCDP: "xvtstdcdp", |
| XVTSTDCSP: "xvtstdcsp", |
| XVXEXPDP: "xvxexpdp", |
| XVXEXPSP: "xvxexpsp", |
| XVXSIGDP: "xvxsigdp", |
| XVXSIGSP: "xvxsigsp", |
| XXBRD: "xxbrd", |
| XXBRH: "xxbrh", |
| XXBRQ: "xxbrq", |
| XXBRW: "xxbrw", |
| XXEXTRACTUW: "xxextractuw", |
| XXINSERTW: "xxinsertw", |
| XXPERM: "xxperm", |
| XXPERMR: "xxpermr", |
| XXSPLTIB: "xxspltib", |
| BCDADDCC: "bcdadd.", |
| BCDSUBCC: "bcdsub.", |
| BCTAR: "bctar", |
| BCTARL: "bctarl", |
| CLRBHRB: "clrbhrb", |
| FMRGEW: "fmrgew", |
| FMRGOW: "fmrgow", |
| ICBT: "icbt", |
| LQARX: "lqarx", |
| LXSIWAX: "lxsiwax", |
| LXSIWZX: "lxsiwzx", |
| LXSSPX: "lxsspx", |
| MFBHRBE: "mfbhrbe", |
| MFVSRD: "mfvsrd", |
| MFVSRWZ: "mfvsrwz", |
| MSGCLR: "msgclr", |
| MSGCLRP: "msgclrp", |
| MSGSND: "msgsnd", |
| MSGSNDP: "msgsndp", |
| MTVSRD: "mtvsrd", |
| MTVSRWA: "mtvsrwa", |
| MTVSRWZ: "mtvsrwz", |
| RFEBB: "rfebb", |
| STQCXCC: "stqcx.", |
| STXSIWX: "stxsiwx", |
| STXSSPX: "stxsspx", |
| VADDCUQ: "vaddcuq", |
| VADDECUQ: "vaddecuq", |
| VADDEUQM: "vaddeuqm", |
| VADDUDM: "vaddudm", |
| VADDUQM: "vadduqm", |
| VBPERMQ: "vbpermq", |
| VCIPHER: "vcipher", |
| VCIPHERLAST: "vcipherlast", |
| VCLZB: "vclzb", |
| VCLZD: "vclzd", |
| VCLZH: "vclzh", |
| VCLZW: "vclzw", |
| VCMPEQUD: "vcmpequd", |
| VCMPEQUDCC: "vcmpequd.", |
| VCMPGTSD: "vcmpgtsd", |
| VCMPGTSDCC: "vcmpgtsd.", |
| VCMPGTUD: "vcmpgtud", |
| VCMPGTUDCC: "vcmpgtud.", |
| VEQV: "veqv", |
| VGBBD: "vgbbd", |
| VMAXSD: "vmaxsd", |
| VMAXUD: "vmaxud", |
| VMINSD: "vminsd", |
| VMINUD: "vminud", |
| VMRGEW: "vmrgew", |
| VMRGOW: "vmrgow", |
| VMULESW: "vmulesw", |
| VMULEUW: "vmuleuw", |
| VMULOSW: "vmulosw", |
| VMULOUW: "vmulouw", |
| VMULUWM: "vmuluwm", |
| VNAND: "vnand", |
| VNCIPHER: "vncipher", |
| VNCIPHERLAST: "vncipherlast", |
| VORC: "vorc", |
| VPERMXOR: "vpermxor", |
| VPKSDSS: "vpksdss", |
| VPKSDUS: "vpksdus", |
| VPKUDUM: "vpkudum", |
| VPKUDUS: "vpkudus", |
| VPMSUMB: "vpmsumb", |
| VPMSUMD: "vpmsumd", |
| VPMSUMH: "vpmsumh", |
| VPMSUMW: "vpmsumw", |
| VPOPCNTB: "vpopcntb", |
| VPOPCNTD: "vpopcntd", |
| VPOPCNTH: "vpopcnth", |
| VPOPCNTW: "vpopcntw", |
| VRLD: "vrld", |
| VSBOX: "vsbox", |
| VSHASIGMAD: "vshasigmad", |
| VSHASIGMAW: "vshasigmaw", |
| VSLD: "vsld", |
| VSRAD: "vsrad", |
| VSRD: "vsrd", |
| VSUBCUQ: "vsubcuq", |
| VSUBECUQ: "vsubecuq", |
| VSUBEUQM: "vsubeuqm", |
| VSUBUDM: "vsubudm", |
| VSUBUQM: "vsubuqm", |
| VUPKHSW: "vupkhsw", |
| VUPKLSW: "vupklsw", |
| XSADDSP: "xsaddsp", |
| XSCVDPSPN: "xscvdpspn", |
| XSCVSPDPN: "xscvspdpn", |
| XSCVSXDSP: "xscvsxdsp", |
| XSCVUXDSP: "xscvuxdsp", |
| XSDIVSP: "xsdivsp", |
| XSMADDASP: "xsmaddasp", |
| XSMADDMSP: "xsmaddmsp", |
| XSMSUBASP: "xsmsubasp", |
| XSMSUBMSP: "xsmsubmsp", |
| XSMULSP: "xsmulsp", |
| XSNMADDASP: "xsnmaddasp", |
| XSNMADDMSP: "xsnmaddmsp", |
| XSNMSUBASP: "xsnmsubasp", |
| XSNMSUBMSP: "xsnmsubmsp", |
| XSRESP: "xsresp", |
| XSRSP: "xsrsp", |
| XSRSQRTESP: "xsrsqrtesp", |
| XSSQRTSP: "xssqrtsp", |
| XSSUBSP: "xssubsp", |
| XXLEQV: "xxleqv", |
| XXLNAND: "xxlnand", |
| XXLORC: "xxlorc", |
| ADDG6S: "addg6s", |
| BPERMD: "bpermd", |
| CBCDTD: "cbcdtd", |
| CDTBCD: "cdtbcd", |
| DCFFIX: "dcffix", |
| DCFFIXCC: "dcffix.", |
| DIVDE: "divde", |
| DIVDECC: "divde.", |
| DIVDEO: "divdeo", |
| DIVDEOCC: "divdeo.", |
| DIVDEU: "divdeu", |
| DIVDEUCC: "divdeu.", |
| DIVDEUO: "divdeuo", |
| DIVDEUOCC: "divdeuo.", |
| DIVWE: "divwe", |
| DIVWECC: "divwe.", |
| DIVWEO: "divweo", |
| DIVWEOCC: "divweo.", |
| DIVWEU: "divweu", |
| DIVWEUCC: "divweu.", |
| DIVWEUO: "divweuo", |
| DIVWEUOCC: "divweuo.", |
| FCFIDS: "fcfids", |
| FCFIDSCC: "fcfids.", |
| FCFIDU: "fcfidu", |
| FCFIDUCC: "fcfidu.", |
| FCFIDUS: "fcfidus", |
| FCFIDUSCC: "fcfidus.", |
| FCTIDU: "fctidu", |
| FCTIDUCC: "fctidu.", |
| FCTIDUZ: "fctiduz", |
| FCTIDUZCC: "fctiduz.", |
| FCTIWU: "fctiwu", |
| FCTIWUCC: "fctiwu.", |
| FCTIWUZ: "fctiwuz", |
| FCTIWUZCC: "fctiwuz.", |
| FTDIV: "ftdiv", |
| FTSQRT: "ftsqrt", |
| LBARX: "lbarx", |
| LDBRX: "ldbrx", |
| LFIWZX: "lfiwzx", |
| LHARX: "lharx", |
| LXSDX: "lxsdx", |
| LXVD2X: "lxvd2x", |
| LXVDSX: "lxvdsx", |
| LXVW4X: "lxvw4x", |
| POPCNTD: "popcntd", |
| POPCNTW: "popcntw", |
| STBCXCC: "stbcx.", |
| STDBRX: "stdbrx", |
| STHCXCC: "sthcx.", |
| STXSDX: "stxsdx", |
| STXVD2X: "stxvd2x", |
| STXVW4X: "stxvw4x", |
| XSABSDP: "xsabsdp", |
| XSADDDP: "xsadddp", |
| XSCMPODP: "xscmpodp", |
| XSCMPUDP: "xscmpudp", |
| XSCPSGNDP: "xscpsgndp", |
| XSCVDPSP: "xscvdpsp", |
| XSCVDPSXDS: "xscvdpsxds", |
| XSCVDPSXWS: "xscvdpsxws", |
| XSCVDPUXDS: "xscvdpuxds", |
| XSCVDPUXWS: "xscvdpuxws", |
| XSCVSPDP: "xscvspdp", |
| XSCVSXDDP: "xscvsxddp", |
| XSCVUXDDP: "xscvuxddp", |
| XSDIVDP: "xsdivdp", |
| XSMADDADP: "xsmaddadp", |
| XSMADDMDP: "xsmaddmdp", |
| XSMAXDP: "xsmaxdp", |
| XSMINDP: "xsmindp", |
| XSMSUBADP: "xsmsubadp", |
| XSMSUBMDP: "xsmsubmdp", |
| XSMULDP: "xsmuldp", |
| XSNABSDP: "xsnabsdp", |
| XSNEGDP: "xsnegdp", |
| XSNMADDADP: "xsnmaddadp", |
| XSNMADDMDP: "xsnmaddmdp", |
| XSNMSUBADP: "xsnmsubadp", |
| XSNMSUBMDP: "xsnmsubmdp", |
| XSRDPI: "xsrdpi", |
| XSRDPIC: "xsrdpic", |
| XSRDPIM: "xsrdpim", |
| XSRDPIP: "xsrdpip", |
| XSRDPIZ: "xsrdpiz", |
| XSREDP: "xsredp", |
| XSRSQRTEDP: "xsrsqrtedp", |
| XSSQRTDP: "xssqrtdp", |
| XSSUBDP: "xssubdp", |
| XSTDIVDP: "xstdivdp", |
| XSTSQRTDP: "xstsqrtdp", |
| XVABSDP: "xvabsdp", |
| XVABSSP: "xvabssp", |
| XVADDDP: "xvadddp", |
| XVADDSP: "xvaddsp", |
| XVCMPEQDP: "xvcmpeqdp", |
| XVCMPEQDPCC: "xvcmpeqdp.", |
| XVCMPEQSP: "xvcmpeqsp", |
| XVCMPEQSPCC: "xvcmpeqsp.", |
| XVCMPGEDP: "xvcmpgedp", |
| XVCMPGEDPCC: "xvcmpgedp.", |
| XVCMPGESP: "xvcmpgesp", |
| XVCMPGESPCC: "xvcmpgesp.", |
| XVCMPGTDP: "xvcmpgtdp", |
| XVCMPGTDPCC: "xvcmpgtdp.", |
| XVCMPGTSP: "xvcmpgtsp", |
| XVCMPGTSPCC: "xvcmpgtsp.", |
| XVCPSGNDP: "xvcpsgndp", |
| XVCPSGNSP: "xvcpsgnsp", |
| XVCVDPSP: "xvcvdpsp", |
| XVCVDPSXDS: "xvcvdpsxds", |
| XVCVDPSXWS: "xvcvdpsxws", |
| XVCVDPUXDS: "xvcvdpuxds", |
| XVCVDPUXWS: "xvcvdpuxws", |
| XVCVSPDP: "xvcvspdp", |
| XVCVSPSXDS: "xvcvspsxds", |
| XVCVSPSXWS: "xvcvspsxws", |
| XVCVSPUXDS: "xvcvspuxds", |
| XVCVSPUXWS: "xvcvspuxws", |
| XVCVSXDDP: "xvcvsxddp", |
| XVCVSXDSP: "xvcvsxdsp", |
| XVCVSXWDP: "xvcvsxwdp", |
| XVCVSXWSP: "xvcvsxwsp", |
| XVCVUXDDP: "xvcvuxddp", |
| XVCVUXDSP: "xvcvuxdsp", |
| XVCVUXWDP: "xvcvuxwdp", |
| XVCVUXWSP: "xvcvuxwsp", |
| XVDIVDP: "xvdivdp", |
| XVDIVSP: "xvdivsp", |
| XVMADDADP: "xvmaddadp", |
| XVMADDASP: "xvmaddasp", |
| XVMADDMDP: "xvmaddmdp", |
| XVMADDMSP: "xvmaddmsp", |
| XVMAXDP: "xvmaxdp", |
| XVMAXSP: "xvmaxsp", |
| XVMINDP: "xvmindp", |
| XVMINSP: "xvminsp", |
| XVMSUBADP: "xvmsubadp", |
| XVMSUBASP: "xvmsubasp", |
| XVMSUBMDP: "xvmsubmdp", |
| XVMSUBMSP: "xvmsubmsp", |
| XVMULDP: "xvmuldp", |
| XVMULSP: "xvmulsp", |
| XVNABSDP: "xvnabsdp", |
| XVNABSSP: "xvnabssp", |
| XVNEGDP: "xvnegdp", |
| XVNEGSP: "xvnegsp", |
| XVNMADDADP: "xvnmaddadp", |
| XVNMADDASP: "xvnmaddasp", |
| XVNMADDMDP: "xvnmaddmdp", |
| XVNMADDMSP: "xvnmaddmsp", |
| XVNMSUBADP: "xvnmsubadp", |
| XVNMSUBASP: "xvnmsubasp", |
| XVNMSUBMDP: "xvnmsubmdp", |
| XVNMSUBMSP: "xvnmsubmsp", |
| XVRDPI: "xvrdpi", |
| XVRDPIC: "xvrdpic", |
| XVRDPIM: "xvrdpim", |
| XVRDPIP: "xvrdpip", |
| XVRDPIZ: "xvrdpiz", |
| XVREDP: "xvredp", |
| XVRESP: "xvresp", |
| XVRSPI: "xvrspi", |
| XVRSPIC: "xvrspic", |
| XVRSPIM: "xvrspim", |
| XVRSPIP: "xvrspip", |
| XVRSPIZ: "xvrspiz", |
| XVRSQRTEDP: "xvrsqrtedp", |
| XVRSQRTESP: "xvrsqrtesp", |
| XVSQRTDP: "xvsqrtdp", |
| XVSQRTSP: "xvsqrtsp", |
| XVSUBDP: "xvsubdp", |
| XVSUBSP: "xvsubsp", |
| XVTDIVDP: "xvtdivdp", |
| XVTDIVSP: "xvtdivsp", |
| XVTSQRTDP: "xvtsqrtdp", |
| XVTSQRTSP: "xvtsqrtsp", |
| XXLAND: "xxland", |
| XXLANDC: "xxlandc", |
| XXLNOR: "xxlnor", |
| XXLOR: "xxlor", |
| XXLXOR: "xxlxor", |
| XXMRGHW: "xxmrghw", |
| XXMRGLW: "xxmrglw", |
| XXPERMDI: "xxpermdi", |
| XXSEL: "xxsel", |
| XXSLDWI: "xxsldwi", |
| XXSPLTW: "xxspltw", |
| CMPB: "cmpb", |
| DADD: "dadd", |
| DADDCC: "dadd.", |
| DADDQ: "daddq", |
| DADDQCC: "daddq.", |
| DCFFIXQ: "dcffixq", |
| DCFFIXQCC: "dcffixq.", |
| DCMPO: "dcmpo", |
| DCMPOQ: "dcmpoq", |
| DCMPU: "dcmpu", |
| DCMPUQ: "dcmpuq", |
| DCTDP: "dctdp", |
| DCTDPCC: "dctdp.", |
| DCTFIX: "dctfix", |
| DCTFIXCC: "dctfix.", |
| DCTFIXQ: "dctfixq", |
| DCTFIXQCC: "dctfixq.", |
| DCTQPQ: "dctqpq", |
| DCTQPQCC: "dctqpq.", |
| DDEDPD: "ddedpd", |
| DDEDPDCC: "ddedpd.", |
| DDEDPDQ: "ddedpdq", |
| DDEDPDQCC: "ddedpdq.", |
| DDIV: "ddiv", |
| DDIVCC: "ddiv.", |
| DDIVQ: "ddivq", |
| DDIVQCC: "ddivq.", |
| DENBCD: "denbcd", |
| DENBCDCC: "denbcd.", |
| DENBCDQ: "denbcdq", |
| DENBCDQCC: "denbcdq.", |
| DIEX: "diex", |
| DIEXCC: "diex.", |
| DIEXQCC: "diexq.", |
| DIEXQ: "diexq", |
| DMUL: "dmul", |
| DMULCC: "dmul.", |
| DMULQ: "dmulq", |
| DMULQCC: "dmulq.", |
| DQUA: "dqua", |
| DQUACC: "dqua.", |
| DQUAI: "dquai", |
| DQUAICC: "dquai.", |
| DQUAIQ: "dquaiq", |
| DQUAIQCC: "dquaiq.", |
| DQUAQ: "dquaq", |
| DQUAQCC: "dquaq.", |
| DRDPQ: "drdpq", |
| DRDPQCC: "drdpq.", |
| DRINTN: "drintn", |
| DRINTNCC: "drintn.", |
| DRINTNQ: "drintnq", |
| DRINTNQCC: "drintnq.", |
| DRINTX: "drintx", |
| DRINTXCC: "drintx.", |
| DRINTXQ: "drintxq", |
| DRINTXQCC: "drintxq.", |
| DRRND: "drrnd", |
| DRRNDCC: "drrnd.", |
| DRRNDQ: "drrndq", |
| DRRNDQCC: "drrndq.", |
| DRSP: "drsp", |
| DRSPCC: "drsp.", |
| DSCLI: "dscli", |
| DSCLICC: "dscli.", |
| DSCLIQ: "dscliq", |
| DSCLIQCC: "dscliq.", |
| DSCRI: "dscri", |
| DSCRICC: "dscri.", |
| DSCRIQ: "dscriq", |
| DSCRIQCC: "dscriq.", |
| DSUB: "dsub", |
| DSUBCC: "dsub.", |
| DSUBQ: "dsubq", |
| DSUBQCC: "dsubq.", |
| DTSTDC: "dtstdc", |
| DTSTDCQ: "dtstdcq", |
| DTSTDG: "dtstdg", |
| DTSTDGQ: "dtstdgq", |
| DTSTEX: "dtstex", |
| DTSTEXQ: "dtstexq", |
| DTSTSF: "dtstsf", |
| DTSTSFQ: "dtstsfq", |
| DXEX: "dxex", |
| DXEXCC: "dxex.", |
| DXEXQ: "dxexq", |
| DXEXQCC: "dxexq.", |
| FCPSGN: "fcpsgn", |
| FCPSGNCC: "fcpsgn.", |
| LBZCIX: "lbzcix", |
| LDCIX: "ldcix", |
| LFDP: "lfdp", |
| LFDPX: "lfdpx", |
| LFIWAX: "lfiwax", |
| LHZCIX: "lhzcix", |
| LWZCIX: "lwzcix", |
| PRTYD: "prtyd", |
| PRTYW: "prtyw", |
| SLBFEECC: "slbfee.", |
| STBCIX: "stbcix", |
| STDCIX: "stdcix", |
| STFDP: "stfdp", |
| STFDPX: "stfdpx", |
| STHCIX: "sthcix", |
| STWCIX: "stwcix", |
| ISEL: "isel", |
| LVEBX: "lvebx", |
| LVEHX: "lvehx", |
| LVEWX: "lvewx", |
| LVSL: "lvsl", |
| LVSR: "lvsr", |
| LVX: "lvx", |
| LVXL: "lvxl", |
| MFVSCR: "mfvscr", |
| MTVSCR: "mtvscr", |
| STVEBX: "stvebx", |
| STVEHX: "stvehx", |
| STVEWX: "stvewx", |
| STVX: "stvx", |
| STVXL: "stvxl", |
| TLBIEL: "tlbiel", |
| VADDCUW: "vaddcuw", |
| VADDFP: "vaddfp", |
| VADDSBS: "vaddsbs", |
| VADDSHS: "vaddshs", |
| VADDSWS: "vaddsws", |
| VADDUBM: "vaddubm", |
| VADDUBS: "vaddubs", |
| VADDUHM: "vadduhm", |
| VADDUHS: "vadduhs", |
| VADDUWM: "vadduwm", |
| VADDUWS: "vadduws", |
| VAND: "vand", |
| VANDC: "vandc", |
| VAVGSB: "vavgsb", |
| VAVGSH: "vavgsh", |
| VAVGSW: "vavgsw", |
| VAVGUB: "vavgub", |
| VAVGUH: "vavguh", |
| VAVGUW: "vavguw", |
| VCFSX: "vcfsx", |
| VCFUX: "vcfux", |
| VCMPBFP: "vcmpbfp", |
| VCMPBFPCC: "vcmpbfp.", |
| VCMPEQFP: "vcmpeqfp", |
| VCMPEQFPCC: "vcmpeqfp.", |
| VCMPEQUB: "vcmpequb", |
| VCMPEQUBCC: "vcmpequb.", |
| VCMPEQUH: "vcmpequh", |
| VCMPEQUHCC: "vcmpequh.", |
| VCMPEQUW: "vcmpequw", |
| VCMPEQUWCC: "vcmpequw.", |
| VCMPGEFP: "vcmpgefp", |
| VCMPGEFPCC: "vcmpgefp.", |
| VCMPGTFP: "vcmpgtfp", |
| VCMPGTFPCC: "vcmpgtfp.", |
| VCMPGTSB: "vcmpgtsb", |
| VCMPGTSBCC: "vcmpgtsb.", |
| VCMPGTSH: "vcmpgtsh", |
| VCMPGTSHCC: "vcmpgtsh.", |
| VCMPGTSW: "vcmpgtsw", |
| VCMPGTSWCC: "vcmpgtsw.", |
| VCMPGTUB: "vcmpgtub", |
| VCMPGTUBCC: "vcmpgtub.", |
| VCMPGTUH: "vcmpgtuh", |
| VCMPGTUHCC: "vcmpgtuh.", |
| VCMPGTUW: "vcmpgtuw", |
| VCMPGTUWCC: "vcmpgtuw.", |
| VCTSXS: "vctsxs", |
| VCTUXS: "vctuxs", |
| VEXPTEFP: "vexptefp", |
| VLOGEFP: "vlogefp", |
| VMADDFP: "vmaddfp", |
| VMAXFP: "vmaxfp", |
| VMAXSB: "vmaxsb", |
| VMAXSH: "vmaxsh", |
| VMAXSW: "vmaxsw", |
| VMAXUB: "vmaxub", |
| VMAXUH: "vmaxuh", |
| VMAXUW: "vmaxuw", |
| VMHADDSHS: "vmhaddshs", |
| VMHRADDSHS: "vmhraddshs", |
| VMINFP: "vminfp", |
| VMINSB: "vminsb", |
| VMINSH: "vminsh", |
| VMINSW: "vminsw", |
| VMINUB: "vminub", |
| VMINUH: "vminuh", |
| VMINUW: "vminuw", |
| VMLADDUHM: "vmladduhm", |
| VMRGHB: "vmrghb", |
| VMRGHH: "vmrghh", |
| VMRGHW: "vmrghw", |
| VMRGLB: "vmrglb", |
| VMRGLH: "vmrglh", |
| VMRGLW: "vmrglw", |
| VMSUMMBM: "vmsummbm", |
| VMSUMSHM: "vmsumshm", |
| VMSUMSHS: "vmsumshs", |
| VMSUMUBM: "vmsumubm", |
| VMSUMUHM: "vmsumuhm", |
| VMSUMUHS: "vmsumuhs", |
| VMULESB: "vmulesb", |
| VMULESH: "vmulesh", |
| VMULEUB: "vmuleub", |
| VMULEUH: "vmuleuh", |
| VMULOSB: "vmulosb", |
| VMULOSH: "vmulosh", |
| VMULOUB: "vmuloub", |
| VMULOUH: "vmulouh", |
| VNMSUBFP: "vnmsubfp", |
| VNOR: "vnor", |
| VOR: "vor", |
| VPERM: "vperm", |
| VPKPX: "vpkpx", |
| VPKSHSS: "vpkshss", |
| VPKSHUS: "vpkshus", |
| VPKSWSS: "vpkswss", |
| VPKSWUS: "vpkswus", |
| VPKUHUM: "vpkuhum", |
| VPKUHUS: "vpkuhus", |
| VPKUWUM: "vpkuwum", |
| VPKUWUS: "vpkuwus", |
| VREFP: "vrefp", |
| VRFIM: "vrfim", |
| VRFIN: "vrfin", |
| VRFIP: "vrfip", |
| VRFIZ: "vrfiz", |
| VRLB: "vrlb", |
| VRLH: "vrlh", |
| VRLW: "vrlw", |
| VRSQRTEFP: "vrsqrtefp", |
| VSEL: "vsel", |
| VSL: "vsl", |
| VSLB: "vslb", |
| VSLDOI: "vsldoi", |
| VSLH: "vslh", |
| VSLO: "vslo", |
| VSLW: "vslw", |
| VSPLTB: "vspltb", |
| VSPLTH: "vsplth", |
| VSPLTISB: "vspltisb", |
| VSPLTISH: "vspltish", |
| VSPLTISW: "vspltisw", |
| VSPLTW: "vspltw", |
| VSR: "vsr", |
| VSRAB: "vsrab", |
| VSRAH: "vsrah", |
| VSRAW: "vsraw", |
| VSRB: "vsrb", |
| VSRH: "vsrh", |
| VSRO: "vsro", |
| VSRW: "vsrw", |
| VSUBCUW: "vsubcuw", |
| VSUBFP: "vsubfp", |
| VSUBSBS: "vsubsbs", |
| VSUBSHS: "vsubshs", |
| VSUBSWS: "vsubsws", |
| VSUBUBM: "vsububm", |
| VSUBUBS: "vsububs", |
| VSUBUHM: "vsubuhm", |
| VSUBUHS: "vsubuhs", |
| VSUBUWM: "vsubuwm", |
| VSUBUWS: "vsubuws", |
| VSUM2SWS: "vsum2sws", |
| VSUM4SBS: "vsum4sbs", |
| VSUM4SHS: "vsum4shs", |
| VSUM4UBS: "vsum4ubs", |
| VSUMSWS: "vsumsws", |
| VUPKHPX: "vupkhpx", |
| VUPKHSB: "vupkhsb", |
| VUPKHSH: "vupkhsh", |
| VUPKLPX: "vupklpx", |
| VUPKLSB: "vupklsb", |
| VUPKLSH: "vupklsh", |
| VXOR: "vxor", |
| FRE: "fre", |
| FRECC: "fre.", |
| FRIM: "frim", |
| FRIMCC: "frim.", |
| FRIN: "frin", |
| FRINCC: "frin.", |
| FRIP: "frip", |
| FRIPCC: "frip.", |
| FRIZ: "friz", |
| FRIZCC: "friz.", |
| FRSQRTES: "frsqrtes", |
| FRSQRTESCC: "frsqrtes.", |
| HRFID: "hrfid", |
| POPCNTB: "popcntb", |
| MFOCRF: "mfocrf", |
| MTOCRF: "mtocrf", |
| SLBMFEE: "slbmfee", |
| SLBMFEV: "slbmfev", |
| SLBMTE: "slbmte", |
| RFSCV: "rfscv", |
| SCV: "scv", |
| LQ: "lq", |
| STQ: "stq", |
| CNTLZD: "cntlzd", |
| CNTLZDCC: "cntlzd.", |
| DCBF: "dcbf", |
| DCBST: "dcbst", |
| DCBT: "dcbt", |
| DCBTST: "dcbtst", |
| DIVD: "divd", |
| DIVDCC: "divd.", |
| DIVDO: "divdo", |
| DIVDOCC: "divdo.", |
| DIVDU: "divdu", |
| DIVDUCC: "divdu.", |
| DIVDUO: "divduo", |
| DIVDUOCC: "divduo.", |
| DIVW: "divw", |
| DIVWCC: "divw.", |
| DIVWO: "divwo", |
| DIVWOCC: "divwo.", |
| DIVWU: "divwu", |
| DIVWUCC: "divwu.", |
| DIVWUO: "divwuo", |
| DIVWUOCC: "divwuo.", |
| EIEIO: "eieio", |
| EXTSB: "extsb", |
| EXTSBCC: "extsb.", |
| EXTSW: "extsw", |
| EXTSWCC: "extsw.", |
| FADDS: "fadds", |
| FADDSCC: "fadds.", |
| FCFID: "fcfid", |
| FCFIDCC: "fcfid.", |
| FCTID: "fctid", |
| FCTIDCC: "fctid.", |
| FCTIDZ: "fctidz", |
| FCTIDZCC: "fctidz.", |
| FDIVS: "fdivs", |
| FDIVSCC: "fdivs.", |
| FMADDS: "fmadds", |
| FMADDSCC: "fmadds.", |
| FMSUBS: "fmsubs", |
| FMSUBSCC: "fmsubs.", |
| FMULS: "fmuls", |
| FMULSCC: "fmuls.", |
| FNMADDS: "fnmadds", |
| FNMADDSCC: "fnmadds.", |
| FNMSUBS: "fnmsubs", |
| FNMSUBSCC: "fnmsubs.", |
| FRES: "fres", |
| FRESCC: "fres.", |
| FRSQRTE: "frsqrte", |
| FRSQRTECC: "frsqrte.", |
| FSEL: "fsel", |
| FSELCC: "fsel.", |
| FSQRTS: "fsqrts", |
| FSQRTSCC: "fsqrts.", |
| FSUBS: "fsubs", |
| FSUBSCC: "fsubs.", |
| ICBI: "icbi", |
| LD: "ld", |
| LDARX: "ldarx", |
| LDU: "ldu", |
| LDUX: "ldux", |
| LDX: "ldx", |
| LWA: "lwa", |
| LWARX: "lwarx", |
| LWAUX: "lwaux", |
| LWAX: "lwax", |
| MFTB: "mftb", |
| MTMSRD: "mtmsrd", |
| MULHD: "mulhd", |
| MULHDCC: "mulhd.", |
| MULHDU: "mulhdu", |
| MULHDUCC: "mulhdu.", |
| MULHW: "mulhw", |
| MULHWCC: "mulhw.", |
| MULHWU: "mulhwu", |
| MULHWUCC: "mulhwu.", |
| MULLD: "mulld", |
| MULLDCC: "mulld.", |
| MULLDO: "mulldo", |
| MULLDOCC: "mulldo.", |
| RFID: "rfid", |
| RLDCL: "rldcl", |
| RLDCLCC: "rldcl.", |
| RLDCR: "rldcr", |
| RLDCRCC: "rldcr.", |
| RLDIC: "rldic", |
| RLDICCC: "rldic.", |
| RLDICL: "rldicl", |
| RLDICLCC: "rldicl.", |
| RLDICR: "rldicr", |
| RLDICRCC: "rldicr.", |
| RLDIMI: "rldimi", |
| RLDIMICC: "rldimi.", |
| SC: "sc", |
| SLBIA: "slbia", |
| SLBIE: "slbie", |
| SLD: "sld", |
| SLDCC: "sld.", |
| SRAD: "srad", |
| SRADCC: "srad.", |
| SRADI: "sradi", |
| SRADICC: "sradi.", |
| SRD: "srd", |
| SRDCC: "srd.", |
| STD: "std", |
| STDCXCC: "stdcx.", |
| STDU: "stdu", |
| STDUX: "stdux", |
| STDX: "stdx", |
| STFIWX: "stfiwx", |
| STWCXCC: "stwcx.", |
| SUBF: "subf", |
| SUBFCC: "subf.", |
| SUBFO: "subfo", |
| SUBFOCC: "subfo.", |
| TD: "td", |
| TDI: "tdi", |
| TLBSYNC: "tlbsync", |
| FCTIW: "fctiw", |
| FCTIWCC: "fctiw.", |
| FCTIWZ: "fctiwz", |
| FCTIWZCC: "fctiwz.", |
| FSQRT: "fsqrt", |
| FSQRTCC: "fsqrt.", |
| ADD: "add", |
| ADDCC: "add.", |
| ADDO: "addo", |
| ADDOCC: "addo.", |
| ADDC: "addc", |
| ADDCCC: "addc.", |
| ADDCO: "addco", |
| ADDCOCC: "addco.", |
| ADDE: "adde", |
| ADDECC: "adde.", |
| ADDEO: "addeo", |
| ADDEOCC: "addeo.", |
| LI: "li", |
| ADDI: "addi", |
| ADDIC: "addic", |
| ADDICCC: "addic.", |
| LIS: "lis", |
| ADDIS: "addis", |
| ADDME: "addme", |
| ADDMECC: "addme.", |
| ADDMEO: "addmeo", |
| ADDMEOCC: "addmeo.", |
| ADDZE: "addze", |
| ADDZECC: "addze.", |
| ADDZEO: "addzeo", |
| ADDZEOCC: "addzeo.", |
| AND: "and", |
| ANDCC: "and.", |
| ANDC: "andc", |
| ANDCCC: "andc.", |
| ANDICC: "andi.", |
| ANDISCC: "andis.", |
| B: "b", |
| BA: "ba", |
| BL: "bl", |
| BLA: "bla", |
| BC: "bc", |
| BCA: "bca", |
| BCL: "bcl", |
| BCLA: "bcla", |
| BCCTR: "bcctr", |
| BCCTRL: "bcctrl", |
| BCLR: "bclr", |
| BCLRL: "bclrl", |
| CMPW: "cmpw", |
| CMPD: "cmpd", |
| CMP: "cmp", |
| CMPWI: "cmpwi", |
| CMPDI: "cmpdi", |
| CMPI: "cmpi", |
| CMPLW: "cmplw", |
| CMPLD: "cmpld", |
| CMPL: "cmpl", |
| CMPLWI: "cmplwi", |
| CMPLDI: "cmpldi", |
| CMPLI: "cmpli", |
| CNTLZW: "cntlzw", |
| CNTLZWCC: "cntlzw.", |
| CRAND: "crand", |
| CRANDC: "crandc", |
| CREQV: "creqv", |
| CRNAND: "crnand", |
| CRNOR: "crnor", |
| CROR: "cror", |
| CRORC: "crorc", |
| CRXOR: "crxor", |
| DCBZ: "dcbz", |
| EQV: "eqv", |
| EQVCC: "eqv.", |
| EXTSH: "extsh", |
| EXTSHCC: "extsh.", |
| FABS: "fabs", |
| FABSCC: "fabs.", |
| FADD: "fadd", |
| FADDCC: "fadd.", |
| FCMPO: "fcmpo", |
| FCMPU: "fcmpu", |
| FDIV: "fdiv", |
| FDIVCC: "fdiv.", |
| FMADD: "fmadd", |
| FMADDCC: "fmadd.", |
| FMR: "fmr", |
| FMRCC: "fmr.", |
| FMSUB: "fmsub", |
| FMSUBCC: "fmsub.", |
| FMUL: "fmul", |
| FMULCC: "fmul.", |
| FNABS: "fnabs", |
| FNABSCC: "fnabs.", |
| FNEG: "fneg", |
| FNEGCC: "fneg.", |
| FNMADD: "fnmadd", |
| FNMADDCC: "fnmadd.", |
| FNMSUB: "fnmsub", |
| FNMSUBCC: "fnmsub.", |
| FRSP: "frsp", |
| FRSPCC: "frsp.", |
| FSUB: "fsub", |
| FSUBCC: "fsub.", |
| ISYNC: "isync", |
| LBZ: "lbz", |
| LBZU: "lbzu", |
| LBZUX: "lbzux", |
| LBZX: "lbzx", |
| LFD: "lfd", |
| LFDU: "lfdu", |
| LFDUX: "lfdux", |
| LFDX: "lfdx", |
| LFS: "lfs", |
| LFSU: "lfsu", |
| LFSUX: "lfsux", |
| LFSX: "lfsx", |
| LHA: "lha", |
| LHAU: "lhau", |
| LHAUX: "lhaux", |
| LHAX: "lhax", |
| LHBRX: "lhbrx", |
| LHZ: "lhz", |
| LHZU: "lhzu", |
| LHZUX: "lhzux", |
| LHZX: "lhzx", |
| LMW: "lmw", |
| LSWI: "lswi", |
| LSWX: "lswx", |
| LWBRX: "lwbrx", |
| LWZ: "lwz", |
| LWZU: "lwzu", |
| LWZUX: "lwzux", |
| LWZX: "lwzx", |
| MCRF: "mcrf", |
| MCRFS: "mcrfs", |
| MFCR: "mfcr", |
| MFFS: "mffs", |
| MFFSCC: "mffs.", |
| MFMSR: "mfmsr", |
| MFSPR: "mfspr", |
| MTCRF: "mtcrf", |
| MTFSB0: "mtfsb0", |
| MTFSB0CC: "mtfsb0.", |
| MTFSB1: "mtfsb1", |
| MTFSB1CC: "mtfsb1.", |
| MTFSF: "mtfsf", |
| MTFSFCC: "mtfsf.", |
| MTFSFI: "mtfsfi", |
| MTFSFICC: "mtfsfi.", |
| MTMSR: "mtmsr", |
| MTSPR: "mtspr", |
| MULLI: "mulli", |
| MULLW: "mullw", |
| MULLWCC: "mullw.", |
| MULLWO: "mullwo", |
| MULLWOCC: "mullwo.", |
| NAND: "nand", |
| NANDCC: "nand.", |
| NEG: "neg", |
| NEGCC: "neg.", |
| NEGO: "nego", |
| NEGOCC: "nego.", |
| NOR: "nor", |
| NORCC: "nor.", |
| OR: "or", |
| ORCC: "or.", |
| ORC: "orc", |
| ORCCC: "orc.", |
| NOP: "nop", |
| ORI: "ori", |
| ORIS: "oris", |
| RLWIMI: "rlwimi", |
| RLWIMICC: "rlwimi.", |
| RLWINM: "rlwinm", |
| RLWINMCC: "rlwinm.", |
| RLWNM: "rlwnm", |
| RLWNMCC: "rlwnm.", |
| SLW: "slw", |
| SLWCC: "slw.", |
| SRAW: "sraw", |
| SRAWCC: "sraw.", |
| SRAWI: "srawi", |
| SRAWICC: "srawi.", |
| SRW: "srw", |
| SRWCC: "srw.", |
| STB: "stb", |
| STBU: "stbu", |
| STBUX: "stbux", |
| STBX: "stbx", |
| STFD: "stfd", |
| STFDU: "stfdu", |
| STFDUX: "stfdux", |
| STFDX: "stfdx", |
| STFS: "stfs", |
| STFSU: "stfsu", |
| STFSUX: "stfsux", |
| STFSX: "stfsx", |
| STH: "sth", |
| STHBRX: "sthbrx", |
| STHU: "sthu", |
| STHUX: "sthux", |
| STHX: "sthx", |
| STMW: "stmw", |
| STSWI: "stswi", |
| STSWX: "stswx", |
| STW: "stw", |
| STWBRX: "stwbrx", |
| STWU: "stwu", |
| STWUX: "stwux", |
| STWX: "stwx", |
| SUBFC: "subfc", |
| SUBFCCC: "subfc.", |
| SUBFCO: "subfco", |
| SUBFCOCC: "subfco.", |
| SUBFE: "subfe", |
| SUBFECC: "subfe.", |
| SUBFEO: "subfeo", |
| SUBFEOCC: "subfeo.", |
| SUBFIC: "subfic", |
| SUBFME: "subfme", |
| SUBFMECC: "subfme.", |
| SUBFMEO: "subfmeo", |
| SUBFMEOCC: "subfmeo.", |
| SUBFZE: "subfze", |
| SUBFZECC: "subfze.", |
| SUBFZEO: "subfzeo", |
| SUBFZEOCC: "subfzeo.", |
| SYNC: "sync", |
| TLBIE: "tlbie", |
| TW: "tw", |
| TWI: "twi", |
| XOR: "xor", |
| XORCC: "xor.", |
| XORI: "xori", |
| XORIS: "xoris", |
| } |
| |
// The ap_* values describe where each instruction operand is encoded.
// Names follow the pattern ap_<Type>_<start>_<end>[_...], giving the
// operand type and the bit range(s) it occupies (big-endian bit numbering,
// bit 0 = most significant; bits 32-63 live in the suffix word of a
// prefixed instruction). Each BitFields triple appears to be
// {Offs, Bits, Word}: the field's starting bit within a 32-bit word, its
// width, and which word of the instruction holds it (0 = prefix/only word,
// 1 = suffix word). For example, ap_Reg_11_15 extracts bits 11-15 of the
// first word, i.e. (word >> 16) & 0x1F.
var (
| ap_Reg_11_15 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_Reg_6_10 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}} |
| ap_Reg_16_20 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}} |
| ap_FPReg_6_10 = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}} |
| ap_VecReg_16_20 = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}} |
| ap_VecReg_6_10 = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}} |
| ap_FPReg_16_20 = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}} |
| ap_VecSReg_31_31_6_10 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1, 0}, {6, 5, 0}}} |
| ap_ImmUnsigned_16_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 5, 0}}} |
| ap_VecSpReg_10_10_6_9 = &argField{Type: TypeVecSpReg, Shift: 0, BitFields: BitFields{{10, 1, 0}, {6, 4, 0}}} |
| ap_Offset_16_27_shift4 = &argField{Type: TypeOffset, Shift: 4, BitFields: BitFields{{16, 12, 0}}} |
| ap_ImmUnsigned_16_25_11_15_31_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 10, 0}, {11, 5, 0}, {31, 1, 0}}} |
| ap_Reg_38_42 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}} |
| ap_Reg_43_47 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{11, 5, 1}}} |
| ap_ImmSigned_14_31_48_63 = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{14, 18, 0}, {16, 16, 1}}} |
| ap_ImmUnsigned_11_11 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 1, 0}}} |
| ap_Offset_14_31_48_63 = &argField{Type: TypeOffset, Shift: 0, BitFields: BitFields{{14, 18, 0}, {16, 16, 1}}} |
| ap_FPReg_38_42 = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}} |
| ap_VecReg_38_42 = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}} |
| ap_VecSReg_37_37_38_42 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{5, 1, 1}, {6, 5, 1}}} |
| ap_VecSpReg_42_42_38_41 = &argField{Type: TypeVecSpReg, Shift: 0, BitFields: BitFields{{10, 1, 1}, {6, 4, 1}}} |
| ap_MMAReg_38_40 = &argField{Type: TypeMMAReg, Shift: 0, BitFields: BitFields{{6, 3, 1}}} |
| ap_VecSReg_61_61_43_47 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1, 1}, {11, 5, 1}}} |
| ap_VecSReg_62_62_48_52 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1, 1}, {16, 5, 1}}} |
| ap_ImmUnsigned_24_27 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{24, 4, 0}}} |
| ap_ImmUnsigned_28_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{28, 4, 0}}} |
| ap_ImmUnsigned_16_17 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 2, 0}}} |
| ap_ImmUnsigned_28_29 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{28, 2, 0}}} |
| ap_ImmUnsigned_16_23 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 8, 0}}} |
| ap_ImmUnsigned_16_19 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 4, 0}}} |
| ap_CondRegBit_11_15 = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_VecReg_11_15 = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_CondRegField_6_8 = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{6, 3, 0}}} |
| ap_ImmUnsigned_15_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{15, 1, 0}}} |
| ap_Reg_21_25 = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}} |
| ap_ImmUnsigned_13_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 3, 0}}} |
| ap_ImmUnsigned_12_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 4, 0}}} |
| ap_VecReg_21_25 = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}} |
| ap_ImmUnsigned_23_25 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{23, 3, 0}}} |
| ap_MMAReg_6_8 = &argField{Type: TypeMMAReg, Shift: 0, BitFields: BitFields{{6, 3, 0}}} |
| ap_VecSReg_29_29_11_15 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1, 0}, {11, 5, 0}}} |
| ap_VecSReg_30_30_16_20 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1, 0}, {16, 5, 0}}} |
| ap_VecSReg_63_63_38_42 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1, 1}, {6, 5, 1}}} |
| ap_VecSReg_60_60_53_57 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 1}, {21, 5, 1}}} |
| ap_ImmUnsigned_24_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{24, 8, 0}}} |
| ap_ImmUnsigned_11_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_ImmUnsigned_29_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{29, 3, 0}}} |
| ap_VecSReg_47_47_38_42 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{15, 1, 1}, {6, 5, 1}}} |
| ap_ImmUnsigned_46_46 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 1, 1}}} |
| ap_ImmUnsigned_16_31_48_63 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 16, 0}, {16, 16, 1}}} |
| ap_ImmUnsigned_21_22 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 2, 0}}} |
| ap_ImmUnsigned_18_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{18, 3, 0}}} |
| ap_ImmUnsigned_19_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{19, 2, 0}}} |
| ap_ImmSigned_16_25_11_15_31_31 = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 10, 0}, {11, 5, 0}, {31, 1, 0}}} |
| ap_ImmUnsigned_22_22 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 1, 0}}} |
| ap_ImmUnsigned_10_10 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 1, 0}}} |
| ap_ImmUnsigned_14_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 2, 0}}} |
| ap_ImmUnsigned_10_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 6, 0}}} |
| ap_ImmUnsigned_30_30_16_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{30, 1, 0}, {16, 5, 0}}} |
| ap_Offset_16_29_shift2 = &argField{Type: TypeOffset, Shift: 2, BitFields: BitFields{{16, 14, 0}}} |
| ap_VecSReg_28_28_6_10 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 0}, {6, 5, 0}}} |
| ap_CondRegField_11_13 = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{11, 3, 0}}} |
| ap_ImmUnsigned_9_10 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 2, 0}}} |
| ap_ImmUnsigned_9_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 7, 0}}} |
| ap_ImmUnsigned_25_25_29_29_11_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{25, 1, 0}, {29, 1, 0}, {11, 5, 0}}} |
| ap_ImmUnsigned_13_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 8, 0}}} |
| ap_ImmUnsigned_6_10 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 5, 0}}} |
| ap_FPReg_11_15 = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_ImmUnsigned_7_10 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 4, 0}}} |
| ap_ImmUnsigned_31_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{31, 1, 0}}} |
| ap_ImmUnsigned_11_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 10, 0}}} |
| ap_ImmUnsigned_20_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 1, 0}}} |
| ap_ImmUnsigned_16_16 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 1, 0}}} |
| ap_ImmUnsigned_17_20 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{17, 4, 0}}} |
| ap_ImmUnsigned_22_23 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 2, 0}}} |
| ap_VecSReg_28_28_21_25 = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 0}, {21, 5, 0}}} |
| ap_ImmUnsigned_11_12 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 2, 0}}} |
| ap_ImmSigned_11_15 = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{11, 5, 0}}} |
| ap_ImmUnsigned_16_21 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 6, 0}}} |
| ap_CondRegBit_21_25 = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{21, 5, 0}}} |
| ap_ImmUnsigned_12_13 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 2, 0}}} |
| ap_ImmUnsigned_14_14 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 1, 0}}} |
| ap_ImmUnsigned_22_25 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 4, 0}}} |
| ap_ImmUnsigned_12_19 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 8, 0}}} |
| ap_ImmUnsigned_20_26 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 7, 0}}} |
| ap_ImmUnsigned_8_10 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{8, 3, 0}}} |
| ap_FPReg_21_25 = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}} |
| ap_SpReg_16_20_11_15 = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{16, 5, 0}, {11, 5, 0}}} |
| ap_ImmUnsigned_26_26_21_25 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 1, 0}, {21, 5, 0}}} |
| ap_ImmSigned_16_31 = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 16, 0}}} |
| ap_ImmUnsigned_16_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 16, 0}}} |
| ap_PCRel_6_29_shift2 = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{6, 24, 0}}} |
| ap_Label_6_29_shift2 = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{6, 24, 0}}} |
| ap_PCRel_16_29_shift2 = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{16, 14, 0}}} |
| ap_Label_16_29_shift2 = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{16, 14, 0}}} |
| ap_CondRegBit_6_10 = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{6, 5, 0}}} |
| ap_CondRegBit_16_20 = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{16, 5, 0}}} |
| ap_Offset_16_31 = &argField{Type: TypeOffset, Shift: 0, BitFields: BitFields{{16, 16, 0}}} |
| ap_ImmUnsigned_7_14 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 8, 0}}} |
| ap_ImmUnsigned_6_6 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 1, 0}}} |
| ap_ImmUnsigned_6_8 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 3, 0}}} |
| ap_ImmUnsigned_21_25 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 5, 0}}} |
| ap_ImmUnsigned_26_30 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 5, 0}}} |
| ) |
| |
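| // An instruction matches an entry below when ins&Mask == Value; bits set |
| // in DontCare are expected to be zero but are not significant when |
| // matching. Args lists the operand fields in assembler-syntax order. |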
| var instFormats = [...]instFormat{ |
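| // Worked example (a minimal sanity check): the word 0x7c830176 |
| // (brd r3,r4), held in the upper half of the 64-bit value, satisfies |
| // ins&Mask == Value for the BRD entry; ap_Reg_11_15 then yields RA=3 |
| // and ap_Reg_6_10 yields RS=4. |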
| {BRD, 0xfc0007fe00000000, 0x7c00017600000000, 0xf80100000000, // Byte-Reverse Doubleword X-form (brd RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {BRH, 0xfc0007fe00000000, 0x7c0001b600000000, 0xf80100000000, // Byte-Reverse Halfword X-form (brh RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {BRW, 0xfc0007fe00000000, 0x7c00013600000000, 0xf80100000000, // Byte-Reverse Word X-form (brw RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {CFUGED, 0xfc0007fe00000000, 0x7c0001b800000000, 0x100000000, // Centrifuge Doubleword X-form (cfuged RA,RS,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}}, |
| {CNTLZDM, 0xfc0007fe00000000, 0x7c00007600000000, 0x100000000, // Count Leading Zeros Doubleword under bit Mask X-form (cntlzdm RA,RS,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}}, |
| {CNTTZDM, 0xfc0007fe00000000, 0x7c00047600000000, 0x100000000, // Count Trailing Zeros Doubleword under bit Mask X-form (cnttzdm RA,RS,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}}, |
| {DCFFIXQQ, 0xfc1f07fe00000000, 0xfc0007c400000000, 0x100000000, // DFP Convert From Fixed Quadword Quad X-form (dcffixqq FRTp,VRB) |
| [6]*argField{ap_FPReg_6_10, ap_VecReg_16_20}}, |
| {DCTFIXQQ, 0xfc1f07fe00000000, 0xfc0107c400000000, 0x100000000, // DFP Convert To Fixed Quadword Quad X-form (dctfixqq VRT,FRBp) |
| [6]*argField{ap_VecReg_6_10, ap_FPReg_16_20}}, |
| {LXVKQ, 0xfc1f07fe00000000, 0xf01f02d000000000, 0x0, // Load VSX Vector Special Value Quadword X-form (lxvkq XT,UIM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_16_20}}, |
| {LXVP, 0xfc00000f00000000, 0x1800000000000000, 0x0, // Load VSX Vector Paired DQ-form (lxvp XTp,DQ(RA)) |
| [6]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}}, |
| {LXVPX, 0xfc0007fe00000000, 0x7c00029a00000000, 0x100000000, // Load VSX Vector Paired Indexed X-form (lxvpx XTp,RA,RB) |
| [6]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVRBX, 0xfc0007fe00000000, 0x7c00001a00000000, 0x0, // Load VSX Vector Rightmost Byte Indexed X-form (lxvrbx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVRDX, 0xfc0007fe00000000, 0x7c0000da00000000, 0x0, // Load VSX Vector Rightmost Doubleword Indexed X-form (lxvrdx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVRHX, 0xfc0007fe00000000, 0x7c00005a00000000, 0x0, // Load VSX Vector Rightmost Halfword Indexed X-form (lxvrhx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVRWX, 0xfc0007fe00000000, 0x7c00009a00000000, 0x0, // Load VSX Vector Rightmost Word Indexed X-form (lxvrwx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MTVSRBM, 0xfc1f07ff00000000, 0x1010064200000000, 0x0, // Move to VSR Byte Mask VX-form (mtvsrbm VRT,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20}}, |
| {MTVSRBMI, 0xfc00003e00000000, 0x1000001400000000, 0x0, // Move To VSR Byte Mask Immediate DX-form (mtvsrbmi VRT,bm) |
| [6]*argField{ap_VecReg_6_10, ap_ImmUnsigned_16_25_11_15_31_31}}, |
| {MTVSRDM, 0xfc1f07ff00000000, 0x1013064200000000, 0x0, // Move to VSR Doubleword Mask VX-form (mtvsrdm VRT,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20}}, |
| {MTVSRHM, 0xfc1f07ff00000000, 0x1011064200000000, 0x0, // Move to VSR Halfword Mask VX-form (mtvsrhm VRT,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20}}, |
| {MTVSRQM, 0xfc1f07ff00000000, 0x1014064200000000, 0x0, // Move to VSR Quadword Mask VX-form (mtvsrqm VRT,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20}}, |
| {MTVSRWM, 0xfc1f07ff00000000, 0x1012064200000000, 0x0, // Move to VSR Word Mask VX-form (mtvsrwm VRT,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20}}, |
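| // Prefixed MLS:D and 8LS:D entries form a 34-bit immediate or |
| // displacement from prefix bits 14-31 concatenated with suffix bits |
| // 48-63; the trailing R operand marks PC-relative addressing. |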
| {PADDI, 0xff800000fc000000, 0x600000038000000, 0x6c000000000000, // Prefixed Add Immediate MLS:D-form (paddi RT,RA,SI,R) |
| [6]*argField{ap_Reg_38_42, ap_Reg_43_47, ap_ImmSigned_14_31_48_63, ap_ImmUnsigned_11_11}}, |
| {PDEPD, 0xfc0007fe00000000, 0x7c00013800000000, 0x100000000, // Parallel Bits Deposit Doubleword X-form (pdepd RA,RS,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}}, |
| {PEXTD, 0xfc0007fe00000000, 0x7c00017800000000, 0x100000000, // Parallel Bits Extract Doubleword X-form (pextd RA,RS,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}}, |
| {PLBZ, 0xff800000fc000000, 0x600000088000000, 0x6c000000000000, // Prefixed Load Byte and Zero MLS:D-form (plbz RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLD, 0xff800000fc000000, 0x4000000e4000000, 0x6c000000000000, // Prefixed Load Doubleword 8LS:D-form (pld RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLFD, 0xff800000fc000000, 0x6000000c8000000, 0x6c000000000000, // Prefixed Load Floating-Point Double MLS:D-form (plfd FRT,D(RA),R) |
| [6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLFS, 0xff800000fc000000, 0x6000000c0000000, 0x6c000000000000, // Prefixed Load Floating-Point Single MLS:D-form (plfs FRT,D(RA),R) |
| [6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLHA, 0xff800000fc000000, 0x6000000a8000000, 0x6c000000000000, // Prefixed Load Halfword Algebraic MLS:D-form (plha RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLHZ, 0xff800000fc000000, 0x6000000a0000000, 0x6c000000000000, // Prefixed Load Halfword and Zero MLS:D-form (plhz RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLQ, 0xff800000fc000000, 0x4000000e0000000, 0x6c000000000000, // Prefixed Load Quadword 8LS:D-form (plq RTp,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLWA, 0xff800000fc000000, 0x4000000a4000000, 0x6c000000000000, // Prefixed Load Word Algebraic 8LS:D-form (plwa RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLWZ, 0xff800000fc000000, 0x600000080000000, 0x6c000000000000, // Prefixed Load Word and Zero MLS:D-form (plwz RT,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLXSD, 0xff800000fc000000, 0x4000000a8000000, 0x6c000000000000, // Prefixed Load VSX Scalar Doubleword 8LS:D-form (plxsd VRT,D(RA),R) |
| [6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLXSSP, 0xff800000fc000000, 0x4000000ac000000, 0x6c000000000000, // Prefixed Load VSX Scalar Single-Precision 8LS:D-form (plxssp VRT,D(RA),R) |
| [6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLXV, 0xff800000f8000000, 0x4000000c8000000, 0x6c000000000000, // Prefixed Load VSX Vector 8LS:D-form (plxv XT,D(RA),R) |
| [6]*argField{ap_VecSReg_37_37_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PLXVP, 0xff800000fc000000, 0x4000000e8000000, 0x6c000000000000, // Prefixed Load VSX Vector Paired 8LS:D-form (plxvp XTp,D(RA),R) |
| [6]*argField{ap_VecSpReg_42_42_38_41, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
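| // MMIRR:XX3-form entries: the XMSK, YMSK, and PMSK mask operands come |
| // from the prefix word, while the suffix word carries the XX3 opcode and |
| // register fields. |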
| {PMXVBF16GER2, 0xfff00000fc0007f8, 0x7900000ec000198, 0xf3f0000000000, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) MMIRR:XX3-form (pmxvbf16ger2 AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVBF16GER2NN, 0xfff00000fc0007f8, 0x7900000ec000790, 0xf3f0000000000, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvbf16ger2nn AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVBF16GER2NP, 0xfff00000fc0007f8, 0x7900000ec000390, 0xf3f0000000000, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvbf16ger2np AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVBF16GER2PN, 0xfff00000fc0007f8, 0x7900000ec000590, 0xf3f0000000000, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvbf16ger2pn AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVBF16GER2PP, 0xfff00000fc0007f8, 0x7900000ec000190, 0xf3f0000000000, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvbf16ger2pp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF16GER2, 0xfff00000fc0007f8, 0x7900000ec000098, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) MMIRR:XX3-form (pmxvf16ger2 AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF16GER2NN, 0xfff00000fc0007f8, 0x7900000ec000690, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf16ger2nn AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF16GER2NP, 0xfff00000fc0007f8, 0x7900000ec000290, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf16ger2np AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF16GER2PN, 0xfff00000fc0007f8, 0x7900000ec000490, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf16ger2pn AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF16GER2PP, 0xfff00000fc0007f8, 0x7900000ec000090, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf16ger2pp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVF32GER, 0xfff00000fc0007f8, 0x7900000ec0000d8, 0xfff0000000000, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form (pmxvf32ger AT,XA,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}}, |
| {PMXVF32GERNN, 0xfff00000fc0007f8, 0x7900000ec0006d0, 0xfff0000000000, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf32gernn AT,XA,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}}, |
| {PMXVF32GERNP, 0xfff00000fc0007f8, 0x7900000ec0002d0, 0xfff0000000000, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf32gernp AT,XA,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}}, |
| {PMXVF32GERPN, 0xfff00000fc0007f8, 0x7900000ec0004d0, 0xfff0000000000, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf32gerpn AT,XA,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}}, |
| {PMXVF32GERPP, 0xfff00000fc0007f8, 0x7900000ec0000d0, 0xfff0000000000, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf32gerpp AT,XA,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}}, |
| {PMXVF64GER, 0xfff00000fc0007f8, 0x7900000ec0001d8, 0xfff0300000000, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form (pmxvf64ger AT,XAp,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}}, |
| {PMXVF64GERNN, 0xfff00000fc0007f8, 0x7900000ec0007d0, 0xfff0300000000, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf64gernn AT,XAp,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}}, |
| {PMXVF64GERNP, 0xfff00000fc0007f8, 0x7900000ec0003d0, 0xfff0300000000, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf64gernp AT,XAp,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}}, |
| {PMXVF64GERPN, 0xfff00000fc0007f8, 0x7900000ec0005d0, 0xfff0300000000, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf64gerpn AT,XAp,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}}, |
| {PMXVF64GERPP, 0xfff00000fc0007f8, 0x7900000ec0001d0, 0xfff0300000000, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf64gerpp AT,XAp,XB,XMSK,YMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}}, |
| {PMXVI16GER2, 0xfff00000fc0007f8, 0x7900000ec000258, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) MMIRR:XX3-form (pmxvi16ger2 AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVI16GER2PP, 0xfff00000fc0007f8, 0x7900000ec000358, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi16ger2pp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVI16GER2S, 0xfff00000fc0007f8, 0x7900000ec000158, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation MMIRR:XX3-form (pmxvi16ger2s AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVI16GER2SPP, 0xfff00000fc0007f8, 0x7900000ec000150, 0xf3f0000000000, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi16ger2spp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}}, |
| {PMXVI4GER8, 0xfff00000fc0007f8, 0x7900000ec000118, 0xf000000000000, // Prefixed Masked VSX Vector 4-bit Signed Integer GER (rank-8 update) MMIRR:XX3-form (pmxvi4ger8 AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_23}}, |
| {PMXVI4GER8PP, 0xfff00000fc0007f8, 0x7900000ec000110, 0xf000000000000, // Prefixed Masked VSX Vector 4-bit Signed Integer GER (rank-8 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi4ger8pp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_23}}, |
| {PMXVI8GER4, 0xfff00000fc0007f8, 0x7900000ec000018, 0xf0f0000000000, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) MMIRR:XX3-form (pmxvi8ger4 AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}}, |
| {PMXVI8GER4PP, 0xfff00000fc0007f8, 0x7900000ec000010, 0xf0f0000000000, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi8ger4pp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}}, |
| {PMXVI8GER4SPP, 0xfff00000fc0007f8, 0x7900000ec000318, 0xf0f0000000000, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) with Saturate Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi8ger4spp AT,XA,XB,XMSK,YMSK,PMSK) |
| [6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}}, |
| {PNOP, 0xfff3fffe00000000, 0x700000000000000, 0xc000100000000, // Prefixed Nop MRR:*-form (pnop) |
| [6]*argField{}}, |
| {PSTB, 0xff800000fc000000, 0x600000098000000, 0x6c000000000000, // Prefixed Store Byte MLS:D-form (pstb RS,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTD, 0xff800000fc000000, 0x4000000f4000000, 0x6c000000000000, // Prefixed Store Doubleword 8LS:D-form (pstd RS,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTFD, 0xff800000fc000000, 0x6000000d8000000, 0x6c000000000000, // Prefixed Store Floating-Point Double MLS:D-form (pstfd FRS,D(RA),R) |
| [6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTFS, 0xff800000fc000000, 0x6000000d0000000, 0x6c000000000000, // Prefixed Store Floating-Point Single MLS:D-form (pstfs FRS,D(RA),R) |
| [6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTH, 0xff800000fc000000, 0x6000000b0000000, 0x6c000000000000, // Prefixed Store Halfword MLS:D-form (psth RS,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTQ, 0xff800000fc000000, 0x4000000f0000000, 0x6c000000000000, // Prefixed Store Quadword 8LS:D-form (pstq RSp,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTW, 0xff800000fc000000, 0x600000090000000, 0x6c000000000000, // Prefixed Store Word MLS:D-form (pstw RS,D(RA),R) |
| [6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTXSD, 0xff800000fc000000, 0x4000000b8000000, 0x6c000000000000, // Prefixed Store VSX Scalar Doubleword 8LS:D-form (pstxsd VRS,D(RA),R) |
| [6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTXSSP, 0xff800000fc000000, 0x4000000bc000000, 0x6c000000000000, // Prefixed Store VSX Scalar Single-Precision 8LS:D-form (pstxssp VRS,D(RA),R) |
| [6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTXV, 0xff800000f8000000, 0x4000000d8000000, 0x6c000000000000, // Prefixed Store VSX Vector 8LS:D-form (pstxv XS,D(RA),R) |
| [6]*argField{ap_VecSReg_37_37_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {PSTXVP, 0xff800000fc000000, 0x4000000f8000000, 0x6c000000000000, // Prefixed Store VSX Vector Paired 8LS:D-form (pstxvp XSp,D(RA),R) |
| [6]*argField{ap_VecSpReg_42_42_38_41, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}}, |
| {SETBC, 0xfc0007fe00000000, 0x7c00030000000000, 0xf80100000000, // Set Boolean Condition X-form (setbc RT,BI) |
| [6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}}, |
| {SETBCR, 0xfc0007fe00000000, 0x7c00034000000000, 0xf80100000000, // Set Boolean Condition Reverse X-form (setbcr RT,BI) |
| [6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}}, |
| {SETNBC, 0xfc0007fe00000000, 0x7c00038000000000, 0xf80100000000, // Set Negative Boolean Condition X-form (setnbc RT,BI) |
| [6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}}, |
| {SETNBCR, 0xfc0007fe00000000, 0x7c0003c000000000, 0xf80100000000, // Set Negative Boolean Condition Reverse X-form (setnbcr RT,BI) |
| [6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}}, |
| {STXVP, 0xfc00000f00000000, 0x1800000100000000, 0x0, // Store VSX Vector Paired DQ-form (stxvp XSp,DQ(RA)) |
| [6]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}}, |
| {STXVPX, 0xfc0007fe00000000, 0x7c00039a00000000, 0x100000000, // Store VSX Vector Paired Indexed X-form (stxvpx XSp,RA,RB) |
| [6]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVRBX, 0xfc0007fe00000000, 0x7c00011a00000000, 0x0, // Store VSX Vector Rightmost Byte Indexed X-form (stxvrbx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVRDX, 0xfc0007fe00000000, 0x7c0001da00000000, 0x0, // Store VSX Vector Rightmost Doubleword Indexed X-form (stxvrdx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVRHX, 0xfc0007fe00000000, 0x7c00015a00000000, 0x0, // Store VSX Vector Rightmost Halfword Indexed X-form (stxvrhx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVRWX, 0xfc0007fe00000000, 0x7c00019a00000000, 0x0, // Store VSX Vector Rightmost Word Indexed X-form (stxvrwx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VCFUGED, 0xfc0007ff00000000, 0x1000054d00000000, 0x0, // Vector Centrifuge Doubleword VX-form (vcfuged VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCLRLB, 0xfc0007ff00000000, 0x1000018d00000000, 0x0, // Vector Clear Leftmost Bytes VX-form (vclrlb VRT,VRA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}}, |
| {VCLRRB, 0xfc0007ff00000000, 0x100001cd00000000, 0x0, // Vector Clear Rightmost Bytes VX-form (vclrrb VRT,VRA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}}, |
| {VCLZDM, 0xfc0007ff00000000, 0x1000078400000000, 0x0, // Vector Count Leading Zeros Doubleword under bit Mask VX-form (vclzdm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPEQUQ, 0xfc0007ff00000000, 0x100001c700000000, 0x0, // Vector Compare Equal Quadword VC-form (vcmpequq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
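| // Op constants ending in CC are the Rc=1 forms; their mnemonics carry a |
| // trailing period (e.g. vcmpequq.). |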
| {VCMPEQUQCC, 0xfc0007ff00000000, 0x100005c700000000, 0x0, // Vector Compare Equal Quadword VC-form (vcmpequq. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTSQ, 0xfc0007ff00000000, 0x1000038700000000, 0x0, // Vector Compare Greater Than Signed Quadword VC-form (vcmpgtsq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTSQCC, 0xfc0007ff00000000, 0x1000078700000000, 0x0, // Vector Compare Greater Than Signed Quadword VC-form (vcmpgtsq. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTUQ, 0xfc0007ff00000000, 0x1000028700000000, 0x0, // Vector Compare Greater Than Unsigned Quadword VC-form (vcmpgtuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTUQCC, 0xfc0007ff00000000, 0x1000068700000000, 0x0, // Vector Compare Greater Than Unsigned Quadword VC-form (vcmpgtuq. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPSQ, 0xfc0007ff00000000, 0x1000014100000000, 0x60000000000000, // Vector Compare Signed Quadword VX-form (vcmpsq BF,VRA,VRB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPUQ, 0xfc0007ff00000000, 0x1000010100000000, 0x60000000000000, // Vector Compare Unsigned Quadword VX-form (vcmpuq BF,VRA,VRB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCNTMBB, 0xfc1e07ff00000000, 0x1018064200000000, 0x0, // Vector Count Mask Bits Byte VX-form (vcntmbb RT,VRB,MP) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}}, |
| {VCNTMBD, 0xfc1e07ff00000000, 0x101e064200000000, 0x0, // Vector Count Mask Bits Doubleword VX-form (vcntmbd RT,VRB,MP) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}}, |
| {VCNTMBH, 0xfc1e07ff00000000, 0x101a064200000000, 0x0, // Vector Count Mask Bits Halfword VX-form (vcntmbh RT,VRB,MP) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}}, |
| {VCNTMBW, 0xfc1e07ff00000000, 0x101c064200000000, 0x0, // Vector Count Mask Bits Word VX-form (vcntmbw RT,VRB,MP) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}}, |
| {VCTZDM, 0xfc0007ff00000000, 0x100007c400000000, 0x0, // Vector Count Trailing Zeros Doubleword under bit Mask VX-form (vctzdm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVESD, 0xfc0007ff00000000, 0x100003cb00000000, 0x0, // Vector Divide Extended Signed Doubleword VX-form (vdivesd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVESQ, 0xfc0007ff00000000, 0x1000030b00000000, 0x0, // Vector Divide Extended Signed Quadword VX-form (vdivesq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVESW, 0xfc0007ff00000000, 0x1000038b00000000, 0x0, // Vector Divide Extended Signed Word VX-form (vdivesw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVEUD, 0xfc0007ff00000000, 0x100002cb00000000, 0x0, // Vector Divide Extended Unsigned Doubleword VX-form (vdiveud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVEUQ, 0xfc0007ff00000000, 0x1000020b00000000, 0x0, // Vector Divide Extended Unsigned Quadword VX-form (vdiveuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVEUW, 0xfc0007ff00000000, 0x1000028b00000000, 0x0, // Vector Divide Extended Unsigned Word VX-form (vdiveuw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVSD, 0xfc0007ff00000000, 0x100001cb00000000, 0x0, // Vector Divide Signed Doubleword VX-form (vdivsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVSQ, 0xfc0007ff00000000, 0x1000010b00000000, 0x0, // Vector Divide Signed Quadword VX-form (vdivsq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVSW, 0xfc0007ff00000000, 0x1000018b00000000, 0x0, // Vector Divide Signed Word VX-form (vdivsw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVUD, 0xfc0007ff00000000, 0x100000cb00000000, 0x0, // Vector Divide Unsigned Doubleword VX-form (vdivud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVUQ, 0xfc0007ff00000000, 0x1000000b00000000, 0x0, // Vector Divide Unsigned Quadword VX-form (vdivuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VDIVUW, 0xfc0007ff00000000, 0x1000008b00000000, 0x0, // Vector Divide Unsigned Word VX-form (vdivuw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VEXPANDBM, 0xfc1f07ff00000000, 0x1000064200000000, 0x0, // Vector Expand Byte Mask VX-form (vexpandbm VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXPANDDM, 0xfc1f07ff00000000, 0x1003064200000000, 0x0, // Vector Expand Doubleword Mask VX-form (vexpanddm VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXPANDHM, 0xfc1f07ff00000000, 0x1001064200000000, 0x0, // Vector Expand Halfword Mask VX-form (vexpandhm VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXPANDQM, 0xfc1f07ff00000000, 0x1004064200000000, 0x0, // Vector Expand Quadword Mask VX-form (vexpandqm VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXPANDWM, 0xfc1f07ff00000000, 0x1002064200000000, 0x0, // Vector Expand Word Mask VX-form (vexpandwm VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTDDVLX, 0xfc00003f00000000, 0x1000001e00000000, 0x0, // Vector Extract Double Doubleword to VSR using GPR-specified Left-Index VA-form (vextddvlx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDDVRX, 0xfc00003f00000000, 0x1000001f00000000, 0x0, // Vector Extract Double Doubleword to VSR using GPR-specified Right-Index VA-form (vextddvrx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUBVLX, 0xfc00003f00000000, 0x1000001800000000, 0x0, // Vector Extract Double Unsigned Byte to VSR using GPR-specified Left-Index VA-form (vextdubvlx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUBVRX, 0xfc00003f00000000, 0x1000001900000000, 0x0, // Vector Extract Double Unsigned Byte to VSR using GPR-specified Right-Index VA-form (vextdubvrx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUHVLX, 0xfc00003f00000000, 0x1000001a00000000, 0x0, // Vector Extract Double Unsigned Halfword to VSR using GPR-specified Left-Index VA-form (vextduhvlx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUHVRX, 0xfc00003f00000000, 0x1000001b00000000, 0x0, // Vector Extract Double Unsigned Halfword to VSR using GPR-specified Right-Index VA-form (vextduhvrx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUWVLX, 0xfc00003f00000000, 0x1000001c00000000, 0x0, // Vector Extract Double Unsigned Word to VSR using GPR-specified Left-Index VA-form (vextduwvlx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTDUWVRX, 0xfc00003f00000000, 0x1000001d00000000, 0x0, // Vector Extract Double Unsigned Word to VSR using GPR-specified Right-Index VA-form (vextduwvrx VRT,VRA,VRB,RC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}}, |
| {VEXTRACTBM, 0xfc1f07ff00000000, 0x1008064200000000, 0x0, // Vector Extract Byte Mask VX-form (vextractbm RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VEXTRACTDM, 0xfc1f07ff00000000, 0x100b064200000000, 0x0, // Vector Extract Doubleword Mask VX-form (vextractdm RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VEXTRACTHM, 0xfc1f07ff00000000, 0x1009064200000000, 0x0, // Vector Extract Halfword Mask VX-form (vextracthm RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VEXTRACTQM, 0xfc1f07ff00000000, 0x100c064200000000, 0x0, // Vector Extract Quadword Mask VX-form (vextractqm RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VEXTRACTWM, 0xfc1f07ff00000000, 0x100a064200000000, 0x0, // Vector Extract Word Mask VX-form (vextractwm RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VEXTSD2Q, 0xfc1f07ff00000000, 0x101b060200000000, 0x0, // Vector Extend Sign Doubleword to Quadword VX-form (vextsd2q VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VGNB, 0xfc0007ff00000000, 0x100004cc00000000, 0x18000000000000, // Vector Gather every Nth Bit VX-form (vgnb RT,VRB,N) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_13_15}}, |
| {VINSBLX, 0xfc0007ff00000000, 0x1000020f00000000, 0x0, // Vector Insert Byte from GPR using GPR-specified Left-Index VX-form (vinsblx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSBRX, 0xfc0007ff00000000, 0x1000030f00000000, 0x0, // Vector Insert Byte from GPR using GPR-specified Right-Index VX-form (vinsbrx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSBVLX, 0xfc0007ff00000000, 0x1000000f00000000, 0x0, // Vector Insert Byte from VSR using GPR-specified Left-Index VX-form (vinsbvlx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSBVRX, 0xfc0007ff00000000, 0x1000010f00000000, 0x0, // Vector Insert Byte from VSR using GPR-specified Right-Index VX-form (vinsbvrx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSD, 0xfc0007ff00000000, 0x100001cf00000000, 0x10000000000000, // Vector Insert Doubleword from GPR using immediate-specified index VX-form (vinsd VRT,RB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}}, |
| {VINSDLX, 0xfc0007ff00000000, 0x100002cf00000000, 0x0, // Vector Insert Doubleword from GPR using GPR-specified Left-Index VX-form (vinsdlx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSDRX, 0xfc0007ff00000000, 0x100003cf00000000, 0x0, // Vector Insert Doubleword from GPR using GPR-specified Right-Index VX-form (vinsdrx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSHLX, 0xfc0007ff00000000, 0x1000024f00000000, 0x0, // Vector Insert Halfword from GPR using GPR-specified Left-Index VX-form (vinshlx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSHRX, 0xfc0007ff00000000, 0x1000034f00000000, 0x0, // Vector Insert Halfword from GPR using GPR-specified Right-Index VX-form (vinshrx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSHVLX, 0xfc0007ff00000000, 0x1000004f00000000, 0x0, // Vector Insert Halfword from VSR using GPR-specified Left-Index VX-form (vinshvlx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSHVRX, 0xfc0007ff00000000, 0x1000014f00000000, 0x0, // Vector Insert Halfword from VSR using GPR-specified Right-Index VX-form (vinshvrx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSW, 0xfc0007ff00000000, 0x100000cf00000000, 0x10000000000000, // Vector Insert Word from GPR using immediate-specified index VX-form (vinsw VRT,RB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}}, |
| {VINSWLX, 0xfc0007ff00000000, 0x1000028f00000000, 0x0, // Vector Insert Word from GPR using GPR-specified Left-Index VX-form (vinswlx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSWRX, 0xfc0007ff00000000, 0x1000038f00000000, 0x0, // Vector Insert Word from GPR using GPR-specified Right-Index VX-form (vinswrx VRT,RA,RB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VINSWVLX, 0xfc0007ff00000000, 0x1000008f00000000, 0x0, // Vector Insert Word from VSR using GPR-specified Left-Index VX-form (vinswvlx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSWVRX, 0xfc0007ff00000000, 0x1000018f00000000, 0x0, // Vector Insert Word from VSR using GPR-specified Right-Index VX-form (vinswvrx VRT,RA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VMODSD, 0xfc0007ff00000000, 0x100007cb00000000, 0x0, // Vector Modulo Signed Doubleword VX-form (vmodsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMODSQ, 0xfc0007ff00000000, 0x1000070b00000000, 0x0, // Vector Modulo Signed Quadword VX-form (vmodsq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMODSW, 0xfc0007ff00000000, 0x1000078b00000000, 0x0, // Vector Modulo Signed Word VX-form (vmodsw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMODUD, 0xfc0007ff00000000, 0x100006cb00000000, 0x0, // Vector Modulo Unsigned Doubleword VX-form (vmodud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMODUQ, 0xfc0007ff00000000, 0x1000060b00000000, 0x0, // Vector Modulo Unsigned Quadword VX-form (vmoduq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMODUW, 0xfc0007ff00000000, 0x1000068b00000000, 0x0, // Vector Modulo Unsigned Word VX-form (vmoduw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMSUMCUD, 0xfc00003f00000000, 0x1000001700000000, 0x0, // Vector Multiply-Sum & write Carry-out Unsigned Doubleword VA-form (vmsumcud VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {VMULESD, 0xfc0007ff00000000, 0x100003c800000000, 0x0, // Vector Multiply Even Signed Doubleword VX-form (vmulesd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULEUD, 0xfc0007ff00000000, 0x100002c800000000, 0x0, // Vector Multiply Even Unsigned Doubleword VX-form (vmuleud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULHSD, 0xfc0007ff00000000, 0x100003c900000000, 0x0, // Vector Multiply High Signed Doubleword VX-form (vmulhsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULHSW, 0xfc0007ff00000000, 0x1000038900000000, 0x0, // Vector Multiply High Signed Word VX-form (vmulhsw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULHUD, 0xfc0007ff00000000, 0x100002c900000000, 0x0, // Vector Multiply High Unsigned Doubleword VX-form (vmulhud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULHUW, 0xfc0007ff00000000, 0x1000028900000000, 0x0, // Vector Multiply High Unsigned Word VX-form (vmulhuw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULLD, 0xfc0007ff00000000, 0x100001c900000000, 0x0, // Vector Multiply Low Doubleword VX-form (vmulld VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULOSD, 0xfc0007ff00000000, 0x100001c800000000, 0x0, // Vector Multiply Odd Signed Doubleword VX-form (vmulosd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULOUD, 0xfc0007ff00000000, 0x100000c800000000, 0x0, // Vector Multiply Odd Unsigned Doubleword VX-form (vmuloud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPDEPD, 0xfc0007ff00000000, 0x100005cd00000000, 0x0, // Vector Parallel Bits Deposit Doubleword VX-form (vpdepd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPEXTD, 0xfc0007ff00000000, 0x1000058d00000000, 0x0, // Vector Parallel Bits Extract Doubleword VX-form (vpextd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLQ, 0xfc0007ff00000000, 0x1000000500000000, 0x0, // Vector Rotate Left Quadword VX-form (vrlq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLQMI, 0xfc0007ff00000000, 0x1000004500000000, 0x0, // Vector Rotate Left Quadword then Mask Insert VX-form (vrlqmi VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLQNM, 0xfc0007ff00000000, 0x1000014500000000, 0x0, // Vector Rotate Left Quadword then AND with Mask VX-form (vrlqnm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSLDBI, 0xfc00063f00000000, 0x1000001600000000, 0x0, // Vector Shift Left Double by Bit Immediate VN-form (vsldbi VRT,VRA,VRB,SH) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}}, |
| {VSLQ, 0xfc0007ff00000000, 0x1000010500000000, 0x0, // Vector Shift Left Quadword VX-form (vslq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSRAQ, 0xfc0007ff00000000, 0x1000030500000000, 0x0, // Vector Shift Right Algebraic Quadword VX-form (vsraq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSRDBI, 0xfc00063f00000000, 0x1000021600000000, 0x0, // Vector Shift Right Double by Bit Immediate VN-form (vsrdbi VRT,VRA,VRB,SH) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}}, |
| {VSRQ, 0xfc0007ff00000000, 0x1000020500000000, 0x0, // Vector Shift Right Quadword VX-form (vsrq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSTRIBL, 0xfc1f07ff00000000, 0x1000000d00000000, 0x0, // Vector String Isolate Byte Left-justified VX-form (vstribl VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIBLCC, 0xfc1f07ff00000000, 0x1000040d00000000, 0x0, // Vector String Isolate Byte Left-justified VX-form (vstribl. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIBR, 0xfc1f07ff00000000, 0x1001000d00000000, 0x0, // Vector String Isolate Byte Right-justified VX-form (vstribr VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIBRCC, 0xfc1f07ff00000000, 0x1001040d00000000, 0x0, // Vector String Isolate Byte Right-justified VX-form (vstribr. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIHL, 0xfc1f07ff00000000, 0x1002000d00000000, 0x0, // Vector String Isolate Halfword Left-justified VX-form (vstrihl VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIHLCC, 0xfc1f07ff00000000, 0x1002040d00000000, 0x0, // Vector String Isolate Halfword Left-justified VX-form (vstrihl. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIHR, 0xfc1f07ff00000000, 0x1003000d00000000, 0x0, // Vector String Isolate Halfword Right-justified VX-form (vstrihr VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VSTRIHRCC, 0xfc1f07ff00000000, 0x1003040d00000000, 0x0, // Vector String Isolate Halfword Right-justified VX-form (vstrihr. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCMPEQQP, 0xfc0007fe00000000, 0xfc00008800000000, 0x100000000, // VSX Scalar Compare Equal Quad-Precision X-form (xscmpeqqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCMPGEQP, 0xfc0007fe00000000, 0xfc00018800000000, 0x100000000, // VSX Scalar Compare Greater Than or Equal Quad-Precision X-form (xscmpgeqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCMPGTQP, 0xfc0007fe00000000, 0xfc0001c800000000, 0x100000000, // VSX Scalar Compare Greater Than Quad-Precision X-form (xscmpgtqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCVQPSQZ, 0xfc1f07fe00000000, 0xfc08068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Signed Quadword X-form (xscvqpsqz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPUQZ, 0xfc1f07fe00000000, 0xfc00068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Quadword X-form (xscvqpuqz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVSQQP, 0xfc1f07fe00000000, 0xfc0b068800000000, 0x100000000, // VSX Scalar Convert with round Signed Quadword to Quad-Precision X-form (xscvsqqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVUQQP, 0xfc1f07fe00000000, 0xfc03068800000000, 0x100000000, // VSX Scalar Convert with round Unsigned Quadword to Quad-Precision X-form (xscvuqqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSMAXCQP, 0xfc0007fe00000000, 0xfc00054800000000, 0x100000000, // VSX Scalar Maximum Type-C Quad-Precision X-form (xsmaxcqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMINCQP, 0xfc0007fe00000000, 0xfc0005c800000000, 0x100000000, // VSX Scalar Minimum Type-C Quad-Precision X-form (xsmincqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
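| // XX3-form GER entries write accumulator AT (ap_MMAReg_6_8); in ISA 3.1 |
| // accumulator i overlaps VSRs 4*i through 4*i+3. |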
| {XVBF16GER2, 0xfc0007f800000000, 0xec00019800000000, 0x60000100000000, // VSX Vector bfloat16 GER (Rank-2 Update) XX3-form (xvbf16ger2 AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVBF16GER2NN, 0xfc0007f800000000, 0xec00079000000000, 0x60000100000000, // VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Negative accumulate XX3-form (xvbf16ger2nn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVBF16GER2NP, 0xfc0007f800000000, 0xec00039000000000, 0x60000100000000, // VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Positive accumulate XX3-form (xvbf16ger2np AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVBF16GER2PN, 0xfc0007f800000000, 0xec00059000000000, 0x60000100000000, // VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Negative accumulate XX3-form (xvbf16ger2pn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVBF16GER2PP, 0xfc0007f800000000, 0xec00019000000000, 0x60000100000000, // VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Positive accumulate XX3-form (xvbf16ger2pp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVCVBF16SPN, 0xfc1f07fc00000000, 0xf010076c00000000, 0x0, // VSX Vector Convert bfloat16 to Single-Precision format XX2-form (xvcvbf16spn XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVCVSPBF16, 0xfc1f07fc00000000, 0xf011076c00000000, 0x0, // VSX Vector Convert with round Single-Precision to bfloat16 format XX2-form (xvcvspbf16 XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVF16GER2, 0xfc0007f800000000, 0xec00009800000000, 0x60000100000000, // VSX Vector 16-bit Floating-Point GER (rank-2 update) XX3-form (xvf16ger2 AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF16GER2NN, 0xfc0007f800000000, 0xec00069000000000, 0x60000100000000, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Negative accumulate XX3-form (xvf16ger2nn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF16GER2NP, 0xfc0007f800000000, 0xec00029000000000, 0x60000100000000, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Positive accumulate XX3-form (xvf16ger2np AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF16GER2PN, 0xfc0007f800000000, 0xec00049000000000, 0x60000100000000, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Negative accumulate XX3-form (xvf16ger2pn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF16GER2PP, 0xfc0007f800000000, 0xec00009000000000, 0x60000100000000, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Positive accumulate XX3-form (xvf16ger2pp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF32GER, 0xfc0007f800000000, 0xec0000d800000000, 0x60000100000000, // VSX Vector 32-bit Floating-Point GER (rank-1 update) XX3-form (xvf32ger AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF32GERNN, 0xfc0007f800000000, 0xec0006d000000000, 0x60000100000000, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate XX3-form (xvf32gernn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF32GERNP, 0xfc0007f800000000, 0xec0002d000000000, 0x60000100000000, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate XX3-form (xvf32gernp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF32GERPN, 0xfc0007f800000000, 0xec0004d000000000, 0x60000100000000, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate XX3-form (xvf32gerpn AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF32GERPP, 0xfc0007f800000000, 0xec0000d000000000, 0x60000100000000, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate XX3-form (xvf32gerpp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF64GER, 0xfc0007f800000000, 0xec0001d800000000, 0x60000100000000, // VSX Vector 64-bit Floating-Point GER (rank-1 update) XX3-form (xvf64ger AT,XAp,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF64GERNN, 0xfc0007f800000000, 0xec0007d000000000, 0x60000100000000, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate XX3-form (xvf64gernn AT,XAp,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF64GERNP, 0xfc0007f800000000, 0xec0003d000000000, 0x60000100000000, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate XX3-form (xvf64gernp AT,XAp,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF64GERPN, 0xfc0007f800000000, 0xec0005d000000000, 0x60000100000000, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate XX3-form (xvf64gerpn AT,XAp,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVF64GERPP, 0xfc0007f800000000, 0xec0001d000000000, 0x60000100000000, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate XX3-form (xvf64gerpp AT,XAp,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI16GER2, 0xfc0007f800000000, 0xec00025800000000, 0x60000100000000, // VSX Vector 16-bit Signed Integer GER (rank-2 update) XX3-form (xvi16ger2 AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI16GER2PP, 0xfc0007f800000000, 0xec00035800000000, 0x60000100000000, // VSX Vector 16-bit Signed Integer GER (rank-2 update) Positive multiply, Positive accumulate XX3-form (xvi16ger2pp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI16GER2S, 0xfc0007f800000000, 0xec00015800000000, 0x60000100000000, // VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation XX3-form (xvi16ger2s AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI16GER2SPP, 0xfc0007f800000000, 0xec00015000000000, 0x60000100000000, // VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation Positive multiply, Positive accumulate XX3-form (xvi16ger2spp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI4GER8, 0xfc0007f800000000, 0xec00011800000000, 0x60000100000000, // VSX Vector 4-bit Signed Integer GER (rank-8 update) XX3-form (xvi4ger8 AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI4GER8PP, 0xfc0007f800000000, 0xec00011000000000, 0x60000100000000, // VSX Vector 4-bit Signed Integer GER (rank-8 update) Positive multiply, Positive accumulate XX3-form (xvi4ger8pp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI8GER4, 0xfc0007f800000000, 0xec00001800000000, 0x60000100000000, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) XX3-form (xvi8ger4 AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI8GER4PP, 0xfc0007f800000000, 0xec00001000000000, 0x60000100000000, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) Positive multiply, Positive accumulate XX3-form (xvi8ger4pp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVI8GER4SPP, 0xfc0007f800000000, 0xec00031800000000, 0x60000100000000, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) with Saturate Positive multiply, Positive accumulate XX3-form (xvi8ger4spp AT,XA,XB) |
| [6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVTLSBB, 0xfc1f07fc00000000, 0xf002076c00000000, 0x60000100000000, // VSX Vector Test Least-Significant Bit by Byte XX2-form (xvtlsbb BF,XB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}}, |
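| // 8RR:XX4-form entries take four VSR operands, each split across the two |
| // words (e.g. XT is bit 63 concatenated with bits 38-42). |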
| {XXBLENDVB, 0xfff00000fc000030, 0x500000084000000, 0xfffff00000000, // VSX Vector Blend Variable Byte 8RR:XX4-form (xxblendvb XT,XA,XB,XC) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}}, |
| {XXBLENDVD, 0xfff00000fc000030, 0x500000084000030, 0xfffff00000000, // VSX Vector Blend Variable Doubleword 8RR:XX4-form (xxblendvd XT,XA,XB,XC) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}}, |
| {XXBLENDVH, 0xfff00000fc000030, 0x500000084000010, 0xfffff00000000, // VSX Vector Blend Variable Halfword 8RR:XX4-form (xxblendvh XT,XA,XB,XC) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}}, |
| {XXBLENDVW, 0xfff00000fc000030, 0x500000084000020, 0xfffff00000000, // VSX Vector Blend Variable Word 8RR:XX4-form (xxblendvw XT,XA,XB,XC) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}}, |
| {XXEVAL, 0xfff00000fc000030, 0x500000088000010, 0xfff0000000000, // VSX Vector Evaluate 8RR:XX4-form (xxeval XT,XA,XB,XC,IMM) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57, ap_ImmUnsigned_24_31}}, |
| {XXGENPCVBM, 0xfc0007fe00000000, 0xf000072800000000, 0x0, // VSX Vector Generate PCV from Byte Mask X-form (xxgenpcvbm XT,VRB,IMM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}}, |
| {XXGENPCVDM, 0xfc0007fe00000000, 0xf000076a00000000, 0x0, // VSX Vector Generate PCV from Doubleword Mask X-form (xxgenpcvdm XT,VRB,IMM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}}, |
| {XXGENPCVHM, 0xfc0007fe00000000, 0xf000072a00000000, 0x0, // VSX Vector Generate PCV from Halfword Mask X-form (xxgenpcvhm XT,VRB,IMM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}}, |
| {XXGENPCVWM, 0xfc0007fe00000000, 0xf000076800000000, 0x0, // VSX Vector Generate PCV from Word Mask X-form (xxgenpcvwm XT,VRB,IMM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}}, |
| {XXMFACC, 0xfc1f07fe00000000, 0x7c00016200000000, 0x60f80100000000, // VSX Move From Accumulator X-form (xxmfacc AS) |
| [6]*argField{ap_MMAReg_6_8}}, |
| {XXMTACC, 0xfc1f07fe00000000, 0x7c01016200000000, 0x60f80100000000, // VSX Move To Accumulator X-form (xxmtacc AT) |
| [6]*argField{ap_MMAReg_6_8}}, |
| {XXPERMX, 0xfff00000fc000030, 0x500000088000000, 0xffff800000000, // VSX Vector Permute Extended 8RR:XX4-form (xxpermx XT,XA,XB,XC,UIM) |
| [6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57, ap_ImmUnsigned_29_31}}, |
| {XXSETACCZ, 0xfc1f07fe00000000, 0x7c03016200000000, 0x60f80100000000, // VSX Set Accumulator to Zero X-form (xxsetaccz AT) |
| [6]*argField{ap_MMAReg_6_8}}, |
| {XXSPLTI32DX, 0xfff00000fc1c0000, 0x500000080000000, 0xf000000000000, // VSX Vector Splat Immediate32 Doubleword Indexed 8RR:D-form (xxsplti32dx XT,IX,IMM32) |
| [6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_46_46, ap_ImmUnsigned_16_31_48_63}}, |
| {XXSPLTIDP, 0xfff00000fc1e0000, 0x500000080040000, 0xf000000000000, // VSX Vector Splat Immediate Double-Precision 8RR:D-form (xxspltidp XT,IMM32) |
| [6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_16_31_48_63}}, |
| {XXSPLTIW, 0xfff00000fc1e0000, 0x500000080060000, 0xf000000000000, // VSX Vector Splat Immediate Word 8RR:D-form (xxspltiw XT,IMM32) |
| [6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_16_31_48_63}}, |
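| // Split immediate fields such as ap_ImmUnsigned_16_31_48_63 above |
| // concatenate the listed bit ranges, most-significant range first, here |
| // yielding the 32-bit IMM32 operand. A sketch of the extraction, assuming |
| // a hypothetical BitField type (IBM numbering, bit 0 = MSB): |
| // |
| //	type BitField struct{ Offs, Bits uint8 } |
| // |
| //	func extract(ui uint64, fields ...BitField) uint64 { |
| //		var v uint64 |
| //		for _, f := range fields { // e.g. {16, 16} then {48, 16} |
| //			v = v<<f.Bits | (ui>>(64-f.Offs-f.Bits))&(1<<f.Bits-1) |
| //		} |
| //		return v |
| //	} |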
| {MSGCLRU, 0xfc0007fe00000000, 0x7c0000dc00000000, 0x3ff000100000000, // Ultravisor Message Clear X-form (msgclru RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {MSGSNDU, 0xfc0007fe00000000, 0x7c00009c00000000, 0x3ff000100000000, // Ultravisor Message Send X-form (msgsndu RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {URFID, 0xfc0007fe00000000, 0x4c00026400000000, 0x3fff80100000000, // Ultravisor Return From Interrupt Doubleword XL-form (urfid) |
| [6]*argField{}}, |
| {ADDEX, 0xfc0001fe00000000, 0x7c00015400000000, 0x100000000, // Add Extended using alternate carry bit Z23-form (addex RT,RA,RB,CY) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_21_22}}, |
| {MFFSCDRN, 0xfc1f07fe00000000, 0xfc14048e00000000, 0x100000000, // Move From FPSCR Control & Set DRN X-form (mffscdrn FRT,FRB) |
| [6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}}, |
| {MFFSCDRNI, 0xfc1f07fe00000000, 0xfc15048e00000000, 0xc00100000000, // Move From FPSCR Control & Set DRN Immediate X-form (mffscdrni FRT,DRM) |
| [6]*argField{ap_FPReg_6_10, ap_ImmUnsigned_18_20}}, |
| {MFFSCE, 0xfc1f07fe00000000, 0xfc01048e00000000, 0xf80100000000, // Move From FPSCR & Clear Enables X-form (mffsce FRT) |
| [6]*argField{ap_FPReg_6_10}}, |
| {MFFSCRN, 0xfc1f07fe00000000, 0xfc16048e00000000, 0x100000000, // Move From FPSCR Control & Set RN X-form (mffscrn FRT,FRB) |
| [6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}}, |
| {MFFSCRNI, 0xfc1f07fe00000000, 0xfc17048e00000000, 0xe00100000000, // Move From FPSCR Control & Set RN Immediate X-form (mffscrni FRT,RM) |
| [6]*argField{ap_FPReg_6_10, ap_ImmUnsigned_19_20}}, |
| {MFFSL, 0xfc1f07fe00000000, 0xfc18048e00000000, 0xf80100000000, // Move From FPSCR Lightweight X-form (mffsl FRT) |
| [6]*argField{ap_FPReg_6_10}}, |
| {SLBIAG, 0xfc0007fe00000000, 0x7c0006a400000000, 0x1ef80100000000, // SLB Invalidate All Global X-form (slbiag RS,L) |
| [6]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}}, |
| {VMSUMUDM, 0xfc00003f00000000, 0x1000002300000000, 0x0, // Vector Multiply-Sum Unsigned Doubleword Modulo VA-form (vmsumudm VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {ADDPCIS, 0xfc00003e00000000, 0x4c00000400000000, 0x0, // Add PC Immediate Shifted DX-form (addpcis RT,D) |
| [6]*argField{ap_Reg_6_10, ap_ImmSigned_16_25_11_15_31_31}}, |
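| // Ops whose Go name ends in CC are the record ("dot") forms: the mnemonic |
| // carries a trailing '.', and the encoding differs from the base op only |
| // in its Rc bit (bit 31 for X/XS-forms, bit 21 for VC-form compares; |
| // compare CNTTZD 0x7c00047400000000 with CNTTZDCC 0x7c00047500000000 |
| // below). |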
| {BCDCFNCC, 0xfc1f05ff00000000, 0x1007058100000000, 0x0, // Decimal Convert From National VX-form (bcdcfn. VRT,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDCFSQCC, 0xfc1f05ff00000000, 0x1002058100000000, 0x0, // Decimal Convert From Signed Quadword VX-form (bcdcfsq. VRT,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDCFZCC, 0xfc1f05ff00000000, 0x1006058100000000, 0x0, // Decimal Convert From Zoned VX-form (bcdcfz. VRT,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDCPSGNCC, 0xfc0007ff00000000, 0x1000034100000000, 0x0, // Decimal Copy Sign VX-form (bcdcpsgn. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {BCDCTNCC, 0xfc1f05ff00000000, 0x1005058100000000, 0x20000000000, // Decimal Convert To National VX-form (bcdctn. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {BCDCTSQCC, 0xfc1f05ff00000000, 0x1000058100000000, 0x20000000000, // Decimal Convert To Signed Quadword VX-form (bcdctsq. VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {BCDCTZCC, 0xfc1f05ff00000000, 0x1004058100000000, 0x0, // Decimal Convert To Zoned VX-form (bcdctz. VRT,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDSCC, 0xfc0005ff00000000, 0x100004c100000000, 0x0, // Decimal Shift VX-form (bcds. VRT,VRA,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDSETSGNCC, 0xfc1f05ff00000000, 0x101f058100000000, 0x0, // Decimal Set Sign VX-form (bcdsetsgn. VRT,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDSRCC, 0xfc0005ff00000000, 0x100005c100000000, 0x0, // Decimal Shift and Round VX-form (bcdsr. VRT,VRA,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDTRUNCCC, 0xfc0005ff00000000, 0x1000050100000000, 0x0, // Decimal Truncate VX-form (bcdtrunc. VRT,VRA,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDUSCC, 0xfc0005ff00000000, 0x1000048100000000, 0x20000000000, // Decimal Unsigned Shift VX-form (bcdus. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {BCDUTRUNCCC, 0xfc0005ff00000000, 0x1000054100000000, 0x20000000000, // Decimal Unsigned Truncate VX-form (bcdutrunc. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {CMPEQB, 0xfc0007fe00000000, 0x7c0001c000000000, 0x60000100000000, // Compare Equal Byte X-form (cmpeqb BF,RA,RB) |
| [6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}}, |
| {CMPRB, 0xfc0007fe00000000, 0x7c00018000000000, 0x40000100000000, // Compare Ranged Byte X-form (cmprb BF,L,RA,RB) |
| [6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {CNTTZD, 0xfc0007ff00000000, 0x7c00047400000000, 0xf80000000000, // Count Trailing Zeros Doubleword X-form (cnttzd RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {CNTTZDCC, 0xfc0007ff00000000, 0x7c00047500000000, 0xf80000000000, // Count Trailing Zeros Doubleword X-form (cnttzd. RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {CNTTZW, 0xfc0007ff00000000, 0x7c00043400000000, 0xf80000000000, // Count Trailing Zeros Word X-form (cnttzw RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {CNTTZWCC, 0xfc0007ff00000000, 0x7c00043500000000, 0xf80000000000, // Count Trailing Zeros Word X-form (cnttzw. RA,RS) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10}}, |
| {COPY, 0xfc2007fe00000000, 0x7c20060c00000000, 0x3c0000100000000, // Copy X-form (copy RA,RB) |
| [6]*argField{ap_Reg_11_15, ap_Reg_16_20}}, |
| {CPABORT, 0xfc0007fe00000000, 0x7c00068c00000000, 0x3fff80100000000, // Copy-Paste Abort X-form (cpabort) |
| [6]*argField{}}, |
| {DARN, 0xfc0007fe00000000, 0x7c0005e600000000, 0x1cf80100000000, // Deliver A Random Number X-form (darn RT,L) |
| [6]*argField{ap_Reg_6_10, ap_ImmUnsigned_14_15}}, |
| {DTSTSFI, 0xfc0007fe00000000, 0xec00054600000000, 0x40000100000000, // DFP Test Significance Immediate X-form (dtstsfi BF,UIM,FRB) |
| [6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}}, |
| {DTSTSFIQ, 0xfc0007fe00000000, 0xfc00054600000000, 0x40000100000000, // DFP Test Significance Immediate Quad X-form (dtstsfiq BF,UIM,FRBp) |
| [6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}}, |
| {EXTSWSLI, 0xfc0007fd00000000, 0x7c0006f400000000, 0x0, // Extend Sign Word and Shift Left Immediate XS-form (extswsli RA,RS,SH) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}}, |
| {EXTSWSLICC, 0xfc0007fd00000000, 0x7c0006f500000000, 0x0, // Extend Sign Word and Shift Left Immediate XS-form (extswsli. RA,RS,SH) |
| [6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}}, |
| {LDAT, 0xfc0007fe00000000, 0x7c0004cc00000000, 0x100000000, // Load Doubleword ATomic X-form (ldat RT,RA,FC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}}, |
| {LWAT, 0xfc0007fe00000000, 0x7c00048c00000000, 0x100000000, // Load Word ATomic X-form (lwat RT,RA,FC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}}, |
| {LXSD, 0xfc00000300000000, 0xe400000200000000, 0x0, // Load VSX Scalar Doubleword DS-form (lxsd VRT,DS(RA)) |
| [6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}}, |
| {LXSIBZX, 0xfc0007fe00000000, 0x7c00061a00000000, 0x0, // Load VSX Scalar as Integer Byte & Zero Indexed X-form (lxsibzx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXSIHZX, 0xfc0007fe00000000, 0x7c00065a00000000, 0x0, // Load VSX Scalar as Integer Halfword & Zero Indexed X-form (lxsihzx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXSSP, 0xfc00000300000000, 0xe400000300000000, 0x0, // Load VSX Scalar Single-Precision DS-form (lxssp VRT,DS(RA)) |
| [6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}}, |
| {LXV, 0xfc00000700000000, 0xf400000100000000, 0x0, // Load VSX Vector DQ-form (lxv XT,DQ(RA)) |
| [6]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}}, |
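| // Offset fields carry an implicit scale: ap_Offset_16_29_shift2 sign- |
| // extends the DS field and appends two zero bits (DS||0b00), and |
| // ap_Offset_16_27_shift4 above appends four (DQ||0b0000), so the decoded |
| // displacements are always multiples of 4 and 16 respectively. |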
| {LXVB16X, 0xfc0007fe00000000, 0x7c0006d800000000, 0x0, // Load VSX Vector Byte*16 Indexed X-form (lxvb16x XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVH8X, 0xfc0007fe00000000, 0x7c00065800000000, 0x0, // Load VSX Vector Halfword*8 Indexed X-form (lxvh8x XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVL, 0xfc0007fe00000000, 0x7c00021a00000000, 0x0, // Load VSX Vector with Length X-form (lxvl XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVLL, 0xfc0007fe00000000, 0x7c00025a00000000, 0x0, // Load VSX Vector with Length Left-justified X-form (lxvll XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVWSX, 0xfc0007fe00000000, 0x7c0002d800000000, 0x0, // Load VSX Vector Word & Splat Indexed X-form (lxvwsx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXVX, 0xfc0007be00000000, 0x7c00021800000000, 0x4000000000, // Load VSX Vector Indexed X-form (lxvx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MADDHD, 0xfc00003f00000000, 0x1000003000000000, 0x0, // Multiply-Add High Doubleword VA-form (maddhd RT,RA,RB,RC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}}, |
| {MADDHDU, 0xfc00003f00000000, 0x1000003100000000, 0x0, // Multiply-Add High Doubleword Unsigned VA-form (maddhdu RT,RA,RB,RC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}}, |
| {MADDLD, 0xfc00003f00000000, 0x1000003300000000, 0x0, // Multiply-Add Low Doubleword VA-form (maddld RT,RA,RB,RC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}}, |
| {MCRXRX, 0xfc0007fe00000000, 0x7c00048000000000, 0x7ff80100000000, // Move to CR from XER Extended X-form (mcrxrx BF) |
| [6]*argField{ap_CondRegField_6_8}}, |
| {MFVSRLD, 0xfc0007fe00000000, 0x7c00026600000000, 0xf80000000000, // Move From VSR Lower Doubleword X-form (mfvsrld RA,XS) |
| [6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}}, |
| {MODSD, 0xfc0007fe00000000, 0x7c00061200000000, 0x100000000, // Modulo Signed Doubleword X-form (modsd RT,RA,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MODSW, 0xfc0007fe00000000, 0x7c00061600000000, 0x100000000, // Modulo Signed Word X-form (modsw RT,RA,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MODUD, 0xfc0007fe00000000, 0x7c00021200000000, 0x100000000, // Modulo Unsigned Doubleword X-form (modud RT,RA,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MODUW, 0xfc0007fe00000000, 0x7c00021600000000, 0x100000000, // Modulo Unsigned Word X-form (moduw RT,RA,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MSGSYNC, 0xfc0007fe00000000, 0x7c0006ec00000000, 0x3fff80100000000, // Message Synchronize X-form (msgsync) |
| [6]*argField{}}, |
| {MTVSRDD, 0xfc0007fe00000000, 0x7c00036600000000, 0x0, // Move To VSR Double Doubleword X-form (mtvsrdd XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MTVSRWS, 0xfc0007fe00000000, 0x7c00032600000000, 0xf80000000000, // Move To VSR Word & Splat X-form (mtvsrws XT,RA) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}}, |
| {PASTECC, 0xfc0007ff00000000, 0x7c00070d00000000, 0x3c0000000000000, // Paste X-form (paste. RA,RB,L) |
| [6]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_10_10}}, |
| {SETB, 0xfc0007fe00000000, 0x7c00010000000000, 0x3f80100000000, // Set Boolean X-form (setb RT,BFA) |
| [6]*argField{ap_Reg_6_10, ap_CondRegField_11_13}}, |
| {SLBIEG, 0xfc0007fe00000000, 0x7c0003a400000000, 0x1f000100000000, // SLB Invalidate Entry Global X-form (slbieg RS,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_16_20}}, |
| {SLBSYNC, 0xfc0007fe00000000, 0x7c0002a400000000, 0x3fff80100000000, // SLB Synchronize X-form (slbsync) |
| [6]*argField{}}, |
| {STDAT, 0xfc0007fe00000000, 0x7c0005cc00000000, 0x100000000, // Store Doubleword ATomic X-form (stdat RS,RA,FC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}}, |
| {STOP, 0xfc0007fe00000000, 0x4c0002e400000000, 0x3fff80100000000, // Stop XL-form (stop) |
| [6]*argField{}}, |
| {STWAT, 0xfc0007fe00000000, 0x7c00058c00000000, 0x100000000, // Store Word ATomic X-form (stwat RS,RA,FC) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}}, |
| {STXSD, 0xfc00000300000000, 0xf400000200000000, 0x0, // Store VSX Scalar Doubleword DS-form (stxsd VRS,DS(RA)) |
| [6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}}, |
| {STXSIBX, 0xfc0007fe00000000, 0x7c00071a00000000, 0x0, // Store VSX Scalar as Integer Byte Indexed X-form (stxsibx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXSIHX, 0xfc0007fe00000000, 0x7c00075a00000000, 0x0, // Store VSX Scalar as Integer Halfword Indexed X-form (stxsihx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXSSP, 0xfc00000300000000, 0xf400000300000000, 0x0, // Store VSX Scalar Single-Precision DS-form (stxssp VRS,DS(RA)) |
| [6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}}, |
| {STXV, 0xfc00000700000000, 0xf400000500000000, 0x0, // Store VSX Vector DQ-form (stxv XS,DQ(RA)) |
| [6]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}}, |
| {STXVB16X, 0xfc0007fe00000000, 0x7c0007d800000000, 0x0, // Store VSX Vector Byte*16 Indexed X-form (stxvb16x XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVH8X, 0xfc0007fe00000000, 0x7c00075800000000, 0x0, // Store VSX Vector Halfword*8 Indexed X-form (stxvh8x XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVL, 0xfc0007fe00000000, 0x7c00031a00000000, 0x0, // Store VSX Vector with Length X-form (stxvl XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVLL, 0xfc0007fe00000000, 0x7c00035a00000000, 0x0, // Store VSX Vector with Length Left-justified X-form (stxvll XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXVX, 0xfc0007fe00000000, 0x7c00031800000000, 0x0, // Store VSX Vector Indexed X-form (stxvx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VABSDUB, 0xfc0007ff00000000, 0x1000040300000000, 0x0, // Vector Absolute Difference Unsigned Byte VX-form (vabsdub VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VABSDUH, 0xfc0007ff00000000, 0x1000044300000000, 0x0, // Vector Absolute Difference Unsigned Halfword VX-form (vabsduh VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VABSDUW, 0xfc0007ff00000000, 0x1000048300000000, 0x0, // Vector Absolute Difference Unsigned Word VX-form (vabsduw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VBPERMD, 0xfc0007ff00000000, 0x100005cc00000000, 0x0, // Vector Bit Permute Doubleword VX-form (vbpermd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCLZLSBB, 0xfc1f07ff00000000, 0x1000060200000000, 0x0, // Vector Count Leading Zero Least-Significant Bits Byte VX-form (vclzlsbb RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VCMPNEB, 0xfc0007ff00000000, 0x1000000700000000, 0x0, // Vector Compare Not Equal Byte VC-form (vcmpneb VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEBCC, 0xfc0007ff00000000, 0x1000040700000000, 0x0, // Vector Compare Not Equal Byte VC-form (vcmpneb. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEH, 0xfc0007ff00000000, 0x1000004700000000, 0x0, // Vector Compare Not Equal Halfword VC-form (vcmpneh VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEHCC, 0xfc0007ff00000000, 0x1000044700000000, 0x0, // Vector Compare Not Equal Halfword VC-form (vcmpneh. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEW, 0xfc0007ff00000000, 0x1000008700000000, 0x0, // Vector Compare Not Equal Word VC-form (vcmpnew VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEWCC, 0xfc0007ff00000000, 0x1000048700000000, 0x0, // Vector Compare Not Equal Word VC-form (vcmpnew. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZB, 0xfc0007ff00000000, 0x1000010700000000, 0x0, // Vector Compare Not Equal or Zero Byte VC-form (vcmpnezb VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZBCC, 0xfc0007ff00000000, 0x1000050700000000, 0x0, // Vector Compare Not Equal or Zero Byte VC-form (vcmpnezb. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZH, 0xfc0007ff00000000, 0x1000014700000000, 0x0, // Vector Compare Not Equal or Zero Halfword VC-form (vcmpnezh VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZHCC, 0xfc0007ff00000000, 0x1000054700000000, 0x0, // Vector Compare Not Equal or Zero Halfword VC-form (vcmpnezh. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZW, 0xfc0007ff00000000, 0x1000018700000000, 0x0, // Vector Compare Not Equal or Zero Word VC-form (vcmpnezw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPNEZWCC, 0xfc0007ff00000000, 0x1000058700000000, 0x0, // Vector Compare Not Equal or Zero Word VC-form (vcmpnezw. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCTZB, 0xfc1f07ff00000000, 0x101c060200000000, 0x0, // Vector Count Trailing Zeros Byte VX-form (vctzb VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCTZD, 0xfc1f07ff00000000, 0x101f060200000000, 0x0, // Vector Count Trailing Zeros Doubleword VX-form (vctzd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCTZH, 0xfc1f07ff00000000, 0x101d060200000000, 0x0, // Vector Count Trailing Zeros Halfword VX-form (vctzh VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCTZLSBB, 0xfc1f07ff00000000, 0x1001060200000000, 0x0, // Vector Count Trailing Zero Least-Significant Bits Byte VX-form (vctzlsbb RT,VRB) |
| [6]*argField{ap_Reg_6_10, ap_VecReg_16_20}}, |
| {VCTZW, 0xfc1f07ff00000000, 0x101e060200000000, 0x0, // Vector Count Trailing Zeros Word VX-form (vctzw VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTRACTD, 0xfc0007ff00000000, 0x100002cd00000000, 0x10000000000000, // Vector Extract Doubleword to VSR using immediate-specified index VX-form (vextractd VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VEXTRACTUB, 0xfc0007ff00000000, 0x1000020d00000000, 0x10000000000000, // Vector Extract Unsigned Byte to VSR using immediate-specified index VX-form (vextractub VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VEXTRACTUH, 0xfc0007ff00000000, 0x1000024d00000000, 0x10000000000000, // Vector Extract Unsigned Halfword to VSR using immediate-specified index VX-form (vextractuh VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VEXTRACTUW, 0xfc0007ff00000000, 0x1000028d00000000, 0x10000000000000, // Vector Extract Unsigned Word to VSR using immediate-specified index VX-form (vextractuw VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VEXTSB2D, 0xfc1f07ff00000000, 0x1018060200000000, 0x0, // Vector Extend Sign Byte To Doubleword VX-form (vextsb2d VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTSB2W, 0xfc1f07ff00000000, 0x1010060200000000, 0x0, // Vector Extend Sign Byte To Word VX-form (vextsb2w VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTSH2D, 0xfc1f07ff00000000, 0x1019060200000000, 0x0, // Vector Extend Sign Halfword To Doubleword VX-form (vextsh2d VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTSH2W, 0xfc1f07ff00000000, 0x1011060200000000, 0x0, // Vector Extend Sign Halfword To Word VX-form (vextsh2w VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTSW2D, 0xfc1f07ff00000000, 0x101a060200000000, 0x0, // Vector Extend Sign Word To Doubleword VX-form (vextsw2d VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VEXTUBLX, 0xfc0007ff00000000, 0x1000060d00000000, 0x0, // Vector Extract Unsigned Byte to GPR using GPR-specified Left-Index VX-form (vextublx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VEXTUBRX, 0xfc0007ff00000000, 0x1000070d00000000, 0x0, // Vector Extract Unsigned Byte to GPR using GPR-specified Right-Index VX-form (vextubrx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VEXTUHLX, 0xfc0007ff00000000, 0x1000064d00000000, 0x0, // Vector Extract Unsigned Halfword to GPR using GPR-specified Left-Index VX-form (vextuhlx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VEXTUHRX, 0xfc0007ff00000000, 0x1000074d00000000, 0x0, // Vector Extract Unsigned Halfword to GPR using GPR-specified Right-Index VX-form (vextuhrx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VEXTUWLX, 0xfc0007ff00000000, 0x1000068d00000000, 0x0, // Vector Extract Unsigned Word to GPR using GPR-specified Left-Index VX-form (vextuwlx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VEXTUWRX, 0xfc0007ff00000000, 0x1000078d00000000, 0x0, // Vector Extract Unsigned Word to GPR using GPR-specified Right-Index VX-form (vextuwrx RT,RA,VRB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}}, |
| {VINSERTB, 0xfc0007ff00000000, 0x1000030d00000000, 0x10000000000000, // Vector Insert Byte from VSR using immediate-specified index VX-form (vinsertb VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VINSERTD, 0xfc0007ff00000000, 0x100003cd00000000, 0x10000000000000, // Vector Insert Doubleword from VSR using immediate-specified index VX-form (vinsertd VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VINSERTH, 0xfc0007ff00000000, 0x1000034d00000000, 0x10000000000000, // Vector Insert Halfword from VSR using immediate-specified index VX-form (vinserth VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VINSERTW, 0xfc0007ff00000000, 0x1000038d00000000, 0x10000000000000, // Vector Insert Word from VSR using immediate-specified index VX-form (vinsertw VRT,VRB,UIM) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}}, |
| {VMUL10CUQ, 0xfc0007ff00000000, 0x1000000100000000, 0xf80000000000, // Vector Multiply-by-10 & write Carry-out Unsigned Quadword VX-form (vmul10cuq VRT,VRA) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}}, |
| {VMUL10ECUQ, 0xfc0007ff00000000, 0x1000004100000000, 0x0, // Vector Multiply-by-10 Extended & write Carry-out Unsigned Quadword VX-form (vmul10ecuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMUL10EUQ, 0xfc0007ff00000000, 0x1000024100000000, 0x0, // Vector Multiply-by-10 Extended Unsigned Quadword VX-form (vmul10euq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMUL10UQ, 0xfc0007ff00000000, 0x1000020100000000, 0xf80000000000, // Vector Multiply-by-10 Unsigned Quadword VX-form (vmul10uq VRT,VRA) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}}, |
| {VNEGD, 0xfc1f07ff00000000, 0x1007060200000000, 0x0, // Vector Negate Doubleword VX-form (vnegd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VNEGW, 0xfc1f07ff00000000, 0x1006060200000000, 0x0, // Vector Negate Word VX-form (vnegw VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPERMR, 0xfc00003f00000000, 0x1000003b00000000, 0x0, // Vector Permute Right-indexed VA-form (vpermr VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {VPRTYBD, 0xfc1f07ff00000000, 0x1009060200000000, 0x0, // Vector Parity Byte Doubleword VX-form (vprtybd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPRTYBQ, 0xfc1f07ff00000000, 0x100a060200000000, 0x0, // Vector Parity Byte Quadword VX-form (vprtybq VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPRTYBW, 0xfc1f07ff00000000, 0x1008060200000000, 0x0, // Vector Parity Byte Word VX-form (vprtybw VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VRLDMI, 0xfc0007ff00000000, 0x100000c500000000, 0x0, // Vector Rotate Left Doubleword then Mask Insert VX-form (vrldmi VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLDNM, 0xfc0007ff00000000, 0x100001c500000000, 0x0, // Vector Rotate Left Doubleword then AND with Mask VX-form (vrldnm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLWMI, 0xfc0007ff00000000, 0x1000008500000000, 0x0, // Vector Rotate Left Word then Mask Insert VX-form (vrlwmi VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VRLWNM, 0xfc0007ff00000000, 0x1000018500000000, 0x0, // Vector Rotate Left Word then AND with Mask VX-form (vrlwnm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSLV, 0xfc0007ff00000000, 0x1000074400000000, 0x0, // Vector Shift Left Variable VX-form (vslv VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSRV, 0xfc0007ff00000000, 0x1000070400000000, 0x0, // Vector Shift Right Variable VX-form (vsrv VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {WAIT, 0xfc0007fe00000000, 0x7c00003c00000000, 0x9cf80100000000, // Wait X-form (wait WC,PL) |
| [6]*argField{ap_ImmUnsigned_9_10, ap_ImmUnsigned_14_15}}, |
| {XSABSQP, 0xfc1f07fe00000000, 0xfc00064800000000, 0x100000000, // VSX Scalar Absolute Quad-Precision X-form (xsabsqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSADDQP, 0xfc0007ff00000000, 0xfc00000800000000, 0x0, // VSX Scalar Add Quad-Precision [using round to Odd] X-form (xsaddqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSADDQPO, 0xfc0007ff00000000, 0xfc00000900000000, 0x0, // VSX Scalar Add Quad-Precision [using round to Odd] X-form (xsaddqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCMPEQDP, 0xfc0007f800000000, 0xf000001800000000, 0x0, // VSX Scalar Compare Equal Double-Precision XX3-form (xscmpeqdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSCMPEXPDP, 0xfc0007f800000000, 0xf00001d800000000, 0x60000100000000, // VSX Scalar Compare Exponents Double-Precision XX3-form (xscmpexpdp BF,XA,XB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSCMPEXPQP, 0xfc0007fe00000000, 0xfc00014800000000, 0x60000100000000, // VSX Scalar Compare Exponents Quad-Precision X-form (xscmpexpqp BF,VRA,VRB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCMPGEDP, 0xfc0007f800000000, 0xf000009800000000, 0x0, // VSX Scalar Compare Greater Than or Equal Double-Precision XX3-form (xscmpgedp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSCMPGTDP, 0xfc0007f800000000, 0xf000005800000000, 0x0, // VSX Scalar Compare Greater Than Double-Precision XX3-form (xscmpgtdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSCMPOQP, 0xfc0007fe00000000, 0xfc00010800000000, 0x60000100000000, // VSX Scalar Compare Ordered Quad-Precision X-form (xscmpoqp BF,VRA,VRB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCMPUQP, 0xfc0007fe00000000, 0xfc00050800000000, 0x60000100000000, // VSX Scalar Compare Unordered Quad-Precision X-form (xscmpuqp BF,VRA,VRB) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCPSGNQP, 0xfc0007fe00000000, 0xfc0000c800000000, 0x100000000, // VSX Scalar Copy Sign Quad-Precision X-form (xscpsgnqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSCVDPHP, 0xfc1f07fc00000000, 0xf011056c00000000, 0x0, // VSX Scalar Convert with round Double-Precision to Half-Precision format XX2-form (xscvdphp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XSCVDPQP, 0xfc1f07fe00000000, 0xfc16068800000000, 0x100000000, // VSX Scalar Convert Double-Precision to Quad-Precision format X-form (xscvdpqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVHPDP, 0xfc1f07fc00000000, 0xf010056c00000000, 0x0, // VSX Scalar Convert Half-Precision to Double-Precision format XX2-form (xscvhpdp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XSCVQPDP, 0xfc1f07ff00000000, 0xfc14068800000000, 0x0, // VSX Scalar Convert with round Quad-Precision to Double-Precision format [using round to Odd] X-form (xscvqpdp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPDPO, 0xfc1f07ff00000000, 0xfc14068900000000, 0x0, // VSX Scalar Convert with round Quad-Precision to Double-Precision format [using round to Odd] X-form (xscvqpdpo VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPSDZ, 0xfc1f07fe00000000, 0xfc19068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Signed Doubleword format X-form (xscvqpsdz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPSWZ, 0xfc1f07fe00000000, 0xfc09068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Signed Word format X-form (xscvqpswz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPUDZ, 0xfc1f07fe00000000, 0xfc11068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Doubleword format X-form (xscvqpudz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVQPUWZ, 0xfc1f07fe00000000, 0xfc01068800000000, 0x100000000, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Word format X-form (xscvqpuwz VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVSDQP, 0xfc1f07fe00000000, 0xfc0a068800000000, 0x100000000, // VSX Scalar Convert Signed Doubleword to Quad-Precision format X-form (xscvsdqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSCVUDQP, 0xfc1f07fe00000000, 0xfc02068800000000, 0x100000000, // VSX Scalar Convert Unsigned Doubleword to Quad-Precision format X-form (xscvudqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSDIVQP, 0xfc0007ff00000000, 0xfc00044800000000, 0x0, // VSX Scalar Divide Quad-Precision [using round to Odd] X-form (xsdivqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSDIVQPO, 0xfc0007ff00000000, 0xfc00044900000000, 0x0, // VSX Scalar Divide Quad-Precision [using round to Odd] X-form (xsdivqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSIEXPDP, 0xfc0007fe00000000, 0xf000072c00000000, 0x0, // VSX Scalar Insert Exponent Double-Precision X-form (xsiexpdp XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {XSIEXPQP, 0xfc0007fe00000000, 0xfc0006c800000000, 0x100000000, // VSX Scalar Insert Exponent Quad-Precision X-form (xsiexpqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMADDQP, 0xfc0007ff00000000, 0xfc00030800000000, 0x0, // VSX Scalar Multiply-Add Quad-Precision [using round to Odd] X-form (xsmaddqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMADDQPO, 0xfc0007ff00000000, 0xfc00030900000000, 0x0, // VSX Scalar Multiply-Add Quad-Precision [using round to Odd] X-form (xsmaddqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMAXCDP, 0xfc0007f800000000, 0xf000040000000000, 0x0, // VSX Scalar Maximum Type-C Double-Precision XX3-form (xsmaxcdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSMAXJDP, 0xfc0007f800000000, 0xf000048000000000, 0x0, // VSX Scalar Maximum Type-J Double-Precision XX3-form (xsmaxjdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSMINCDP, 0xfc0007f800000000, 0xf000044000000000, 0x0, // VSX Scalar Minimum Type-C Double-Precision XX3-form (xsmincdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSMINJDP, 0xfc0007f800000000, 0xf00004c000000000, 0x0, // VSX Scalar Minimum Type-J Double-Precision XX3-form (xsminjdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XSMSUBQP, 0xfc0007ff00000000, 0xfc00034800000000, 0x0, // VSX Scalar Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsmsubqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMSUBQPO, 0xfc0007ff00000000, 0xfc00034900000000, 0x0, // VSX Scalar Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsmsubqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMULQP, 0xfc0007ff00000000, 0xfc00004800000000, 0x0, // VSX Scalar Multiply Quad-Precision [using round to Odd] X-form (xsmulqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSMULQPO, 0xfc0007ff00000000, 0xfc00004900000000, 0x0, // VSX Scalar Multiply Quad-Precision [using round to Odd] X-form (xsmulqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSNABSQP, 0xfc1f07fe00000000, 0xfc08064800000000, 0x0, // VSX Scalar Negative Absolute Quad-Precision X-form (xsnabsqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSNEGQP, 0xfc1f07fe00000000, 0xfc10064800000000, 0x100000000, // VSX Scalar Negate Quad-Precision X-form (xsnegqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSNMADDQP, 0xfc0007ff00000000, 0xfc00038800000000, 0x0, // VSX Scalar Negative Multiply-Add Quad-Precision [using round to Odd] X-form (xsnmaddqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSNMADDQPO, 0xfc0007ff00000000, 0xfc00038900000000, 0x0, // VSX Scalar Negative Multiply-Add Quad-Precision [using round to Odd] X-form (xsnmaddqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSNMSUBQP, 0xfc0007ff00000000, 0xfc0003c800000000, 0x0, // VSX Scalar Negative Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsnmsubqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSNMSUBQPO, 0xfc0007ff00000000, 0xfc0003c900000000, 0x0, // VSX Scalar Negative Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsnmsubqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSRQPI, 0xfc0001ff00000000, 0xfc00000a00000000, 0x1e000000000000, // VSX Scalar Round to Quad-Precision Integer [with Inexact] Z23-form (xsrqpi R,VRT,VRB,RMC) |
| [6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}}, |
| {XSRQPIX, 0xfc0001ff00000000, 0xfc00000b00000000, 0x1e000000000000, // VSX Scalar Round to Quad-Precision Integer [with Inexact] Z23-form (xsrqpix R,VRT,VRB,RMC) |
| [6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}}, |
| {XSRQPXP, 0xfc0001fe00000000, 0xfc00004a00000000, 0x1e000100000000, // VSX Scalar Round Quad-Precision to Double-Extended Precision Z23-form (xsrqpxp R,VRT,VRB,RMC) |
| [6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}}, |
| {XSSQRTQP, 0xfc1f07ff00000000, 0xfc1b064800000000, 0x0, // VSX Scalar Square Root Quad-Precision [using round to Odd] X-form (xssqrtqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSSQRTQPO, 0xfc1f07ff00000000, 0xfc1b064900000000, 0x0, // VSX Scalar Square Root Quad-Precision [using round to Odd] X-form (xssqrtqpo VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSSUBQP, 0xfc0007ff00000000, 0xfc00040800000000, 0x0, // VSX Scalar Subtract Quad-Precision [using round to Odd] X-form (xssubqp VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSSUBQPO, 0xfc0007ff00000000, 0xfc00040900000000, 0x0, // VSX Scalar Subtract Quad-Precision [using round to Odd] X-form (xssubqpo VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {XSTSTDCDP, 0xfc0007fc00000000, 0xf00005a800000000, 0x100000000, // VSX Scalar Test Data Class Double-Precision XX2-form (xststdcdp BF,XB,DCMX) |
| [6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}}, |
| {XSTSTDCQP, 0xfc0007fe00000000, 0xfc00058800000000, 0x100000000, // VSX Scalar Test Data Class Quad-Precision X-form (xststdcqp BF,VRB,DCMX) |
| [6]*argField{ap_CondRegField_6_8, ap_VecReg_16_20, ap_ImmUnsigned_9_15}}, |
| {XSTSTDCSP, 0xfc0007fc00000000, 0xf00004a800000000, 0x100000000, // VSX Scalar Test Data Class Single-Precision XX2-form (xststdcsp BF,XB,DCMX) |
| [6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}}, |
| {XSXEXPDP, 0xfc1f07fc00000000, 0xf000056c00000000, 0x100000000, // VSX Scalar Extract Exponent Double-Precision XX2-form (xsxexpdp RT,XB) |
| [6]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}}, |
| {XSXEXPQP, 0xfc1f07fe00000000, 0xfc02064800000000, 0x100000000, // VSX Scalar Extract Exponent Quad-Precision X-form (xsxexpqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XSXSIGDP, 0xfc1f07fc00000000, 0xf001056c00000000, 0x100000000, // VSX Scalar Extract Significand Double-Precision XX2-form (xsxsigdp RT,XB) |
| [6]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}}, |
| {XSXSIGQP, 0xfc1f07fe00000000, 0xfc12064800000000, 0x100000000, // VSX Scalar Extract Significand Quad-Precision X-form (xsxsigqp VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {XVCVHPSP, 0xfc1f07fc00000000, 0xf018076c00000000, 0x0, // VSX Vector Convert Half-Precision to Single-Precision format XX2-form (xvcvhpsp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVCVSPHP, 0xfc1f07fc00000000, 0xf019076c00000000, 0x0, // VSX Vector Convert with round Single-Precision to Half-Precision format XX2-form (xvcvsphp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVIEXPDP, 0xfc0007f800000000, 0xf00007c000000000, 0x0, // VSX Vector Insert Exponent Double-Precision XX3-form (xviexpdp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVIEXPSP, 0xfc0007f800000000, 0xf00006c000000000, 0x0, // VSX Vector Insert Exponent Single-Precision XX3-form (xviexpsp XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XVTSTDCDP, 0xfc0007b800000000, 0xf00007a800000000, 0x0, // VSX Vector Test Data Class Double-Precision XX2-form (xvtstdcdp XT,XB,DCMX) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}}, |
| {XVTSTDCSP, 0xfc0007b800000000, 0xf00006a800000000, 0x0, // VSX Vector Test Data Class Single-Precision XX2-form (xvtstdcsp XT,XB,DCMX) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}}, |
| {XVXEXPDP, 0xfc1f07fc00000000, 0xf000076c00000000, 0x0, // VSX Vector Extract Exponent Double-Precision XX2-form (xvxexpdp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVXEXPSP, 0xfc1f07fc00000000, 0xf008076c00000000, 0x0, // VSX Vector Extract Exponent Single-Precision XX2-form (xvxexpsp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVXSIGDP, 0xfc1f07fc00000000, 0xf001076c00000000, 0x0, // VSX Vector Extract Significand Double-Precision XX2-form (xvxsigdp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XVXSIGSP, 0xfc1f07fc00000000, 0xf009076c00000000, 0x0, // VSX Vector Extract Significand Single-Precision XX2-form (xvxsigsp XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XXBRD, 0xfc1f07fc00000000, 0xf017076c00000000, 0x0, // VSX Vector Byte-Reverse Doubleword XX2-form (xxbrd XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XXBRH, 0xfc1f07fc00000000, 0xf007076c00000000, 0x0, // VSX Vector Byte-Reverse Halfword XX2-form (xxbrh XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XXBRQ, 0xfc1f07fc00000000, 0xf01f076c00000000, 0x0, // VSX Vector Byte-Reverse Quadword XX2-form (xxbrq XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XXBRW, 0xfc1f07fc00000000, 0xf00f076c00000000, 0x0, // VSX Vector Byte-Reverse Word XX2-form (xxbrw XT,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}}, |
| {XXEXTRACTUW, 0xfc0007fc00000000, 0xf000029400000000, 0x10000000000000, // VSX Vector Extract Unsigned Word XX2-form (xxextractuw XT,XB,UIM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}}, |
| {XXINSERTW, 0xfc0007fc00000000, 0xf00002d400000000, 0x10000000000000, // VSX Vector Insert Word XX2-form (xxinsertw XT,XB,UIM) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}}, |
| {XXPERM, 0xfc0007f800000000, 0xf00000d000000000, 0x0, // VSX Vector Permute XX3-form (xxperm XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XXPERMR, 0xfc0007f800000000, 0xf00001d000000000, 0x0, // VSX Vector Permute Right-indexed XX3-form (xxpermr XT,XA,XB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}}, |
| {XXSPLTIB, 0xfc1807fe00000000, 0xf00002d000000000, 0x0, // VSX Vector Splat Immediate Byte X-form (xxspltib XT,IMM8) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_13_20}}, |
| {BCDADDCC, 0xfc0005ff00000000, 0x1000040100000000, 0x0, // Decimal Add Modulo VX-form (bcdadd. VRT,VRA,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCDSUBCC, 0xfc0005ff00000000, 0x1000044100000000, 0x0, // Decimal Subtract Modulo VX-form (bcdsub. VRT,VRA,VRB,PS) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}}, |
| {BCTAR, 0xfc0007ff00000000, 0x4c00046000000000, 0xe00000000000, // Branch Conditional to Branch Target Address Register XL-form (bctar BO,BI,BH) |
| [6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}}, |
| {BCTARL, 0xfc0007ff00000000, 0x4c00046100000000, 0xe00000000000, // Branch Conditional to Branch Target Address Register XL-form (bctarl BO,BI,BH) |
| [6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}}, |
| {CLRBHRB, 0xfc0007fe00000000, 0x7c00035c00000000, 0x3fff80100000000, // Clear BHRB X-form (clrbhrb) |
| [6]*argField{}}, |
| {FMRGEW, 0xfc0007fe00000000, 0xfc00078c00000000, 0x100000000, // Floating Merge Even Word X-form (fmrgew FRT,FRA,FRB) |
| [6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}}, |
| {FMRGOW, 0xfc0007fe00000000, 0xfc00068c00000000, 0x100000000, // Floating Merge Odd Word X-form (fmrgow FRT,FRA,FRB) |
| [6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}}, |
| {ICBT, 0xfc0007fe00000000, 0x7c00002c00000000, 0x200000100000000, // Instruction Cache Block Touch X-form (icbt CT,RA,RB) |
| [6]*argField{ap_ImmUnsigned_7_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LQARX, 0xfc0007fe00000000, 0x7c00022800000000, 0x0, // Load Quadword And Reserve Indexed X-form (lqarx RTp,RA,RB,EH) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}}, |
| {LXSIWAX, 0xfc0007fe00000000, 0x7c00009800000000, 0x0, // Load VSX Scalar as Integer Word Algebraic Indexed X-form (lxsiwax XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXSIWZX, 0xfc0007fe00000000, 0x7c00001800000000, 0x0, // Load VSX Scalar as Integer Word & Zero Indexed X-form (lxsiwzx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {LXSSPX, 0xfc0007fe00000000, 0x7c00041800000000, 0x0, // Load VSX Scalar Single-Precision Indexed X-form (lxsspx XT,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {MFBHRBE, 0xfc0007fe00000000, 0x7c00025c00000000, 0x100000000, // Move From BHRB XFX-form (mfbhrbe RT,BHRBE) |
| [6]*argField{ap_Reg_6_10, ap_ImmUnsigned_11_20}}, |
| {MFVSRD, 0xfc0007fe00000000, 0x7c00006600000000, 0xf80000000000, // Move From VSR Doubleword X-form (mfvsrd RA,XS) |
| [6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}}, |
| {MFVSRWZ, 0xfc0007fe00000000, 0x7c0000e600000000, 0xf80000000000, // Move From VSR Word and Zero X-form (mfvsrwz RA,XS) |
| [6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}}, |
| {MSGCLR, 0xfc0007fe00000000, 0x7c0001dc00000000, 0x3ff000100000000, // Message Clear X-form (msgclr RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {MSGCLRP, 0xfc0007fe00000000, 0x7c00015c00000000, 0x3ff000100000000, // Message Clear Privileged X-form (msgclrp RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {MSGSND, 0xfc0007fe00000000, 0x7c00019c00000000, 0x3ff000100000000, // Message Send X-form (msgsnd RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {MSGSNDP, 0xfc0007fe00000000, 0x7c00011c00000000, 0x3ff000100000000, // Message Send Privileged X-form (msgsndp RB) |
| [6]*argField{ap_Reg_16_20}}, |
| {MTVSRD, 0xfc0007fe00000000, 0x7c00016600000000, 0xf80000000000, // Move To VSR Doubleword X-form (mtvsrd XT,RA) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}}, |
| {MTVSRWA, 0xfc0007fe00000000, 0x7c0001a600000000, 0xf80000000000, // Move To VSR Word Algebraic X-form (mtvsrwa XT,RA) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}}, |
| {MTVSRWZ, 0xfc0007fe00000000, 0x7c0001e600000000, 0xf80000000000, // Move To VSR Word and Zero X-form (mtvsrwz XT,RA) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}}, |
| {RFEBB, 0xfc0007fe00000000, 0x4c00012400000000, 0x3fff00100000000, // Return from Event Based Branch XL-form (rfebb S) |
| [6]*argField{ap_ImmUnsigned_20_20}}, |
| {STQCXCC, 0xfc0007ff00000000, 0x7c00016d00000000, 0x0, // Store Quadword Conditional Indexed X-form (stqcx. RSp,RA,RB) |
| [6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXSIWX, 0xfc0007fe00000000, 0x7c00011800000000, 0x0, // Store VSX Scalar as Integer Word Indexed X-form (stxsiwx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {STXSSPX, 0xfc0007fe00000000, 0x7c00051800000000, 0x0, // Store VSX Scalar Single-Precision Indexed X-form (stxsspx XS,RA,RB) |
| [6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}}, |
| {VADDCUQ, 0xfc0007ff00000000, 0x1000014000000000, 0x0, // Vector Add & write Carry Unsigned Quadword VX-form (vaddcuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VADDECUQ, 0xfc00003f00000000, 0x1000003d00000000, 0x0, // Vector Add Extended & write Carry Unsigned Quadword VA-form (vaddecuq VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {VADDEUQM, 0xfc00003f00000000, 0x1000003c00000000, 0x0, // Vector Add Extended Unsigned Quadword Modulo VA-form (vaddeuqm VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {VADDUDM, 0xfc0007ff00000000, 0x100000c000000000, 0x0, // Vector Add Unsigned Doubleword Modulo VX-form (vaddudm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VADDUQM, 0xfc0007ff00000000, 0x1000010000000000, 0x0, // Vector Add Unsigned Quadword Modulo VX-form (vadduqm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VBPERMQ, 0xfc0007ff00000000, 0x1000054c00000000, 0x0, // Vector Bit Permute Quadword VX-form (vbpermq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCIPHER, 0xfc0007ff00000000, 0x1000050800000000, 0x0, // Vector AES Cipher VX-form (vcipher VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCIPHERLAST, 0xfc0007ff00000000, 0x1000050900000000, 0x0, // Vector AES Cipher Last VX-form (vcipherlast VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCLZB, 0xfc0007ff00000000, 0x1000070200000000, 0x1f000000000000, // Vector Count Leading Zeros Byte VX-form (vclzb VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCLZD, 0xfc0007ff00000000, 0x100007c200000000, 0x1f000000000000, // Vector Count Leading Zeros Doubleword VX-form (vclzd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCLZH, 0xfc0007ff00000000, 0x1000074200000000, 0x1f000000000000, // Vector Count Leading Zeros Halfword VX-form (vclzh VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCLZW, 0xfc0007ff00000000, 0x1000078200000000, 0x1f000000000000, // Vector Count Leading Zeros Word VX-form (vclzw VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VCMPEQUD, 0xfc0007ff00000000, 0x100000c700000000, 0x0, // Vector Compare Equal Unsigned Doubleword VC-form (vcmpequd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPEQUDCC, 0xfc0007ff00000000, 0x100004c700000000, 0x0, // Vector Compare Equal Unsigned Doubleword VC-form (vcmpequd. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTSD, 0xfc0007ff00000000, 0x100003c700000000, 0x0, // Vector Compare Greater Than Signed Doubleword VC-form (vcmpgtsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTSDCC, 0xfc0007ff00000000, 0x100007c700000000, 0x0, // Vector Compare Greater Than Signed Doubleword VC-form (vcmpgtsd. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTUD, 0xfc0007ff00000000, 0x100002c700000000, 0x0, // Vector Compare Greater Than Unsigned Doubleword VC-form (vcmpgtud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VCMPGTUDCC, 0xfc0007ff00000000, 0x100006c700000000, 0x0, // Vector Compare Greater Than Unsigned Doubleword VC-form (vcmpgtud. VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VEQV, 0xfc0007ff00000000, 0x1000068400000000, 0x0, // Vector Logical Equivalence VX-form (veqv VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VGBBD, 0xfc0007ff00000000, 0x1000050c00000000, 0x1f000000000000, // Vector Gather Bits by Bytes by Doubleword VX-form (vgbbd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VMAXSD, 0xfc0007ff00000000, 0x100001c200000000, 0x0, // Vector Maximum Signed Doubleword VX-form (vmaxsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMAXUD, 0xfc0007ff00000000, 0x100000c200000000, 0x0, // Vector Maximum Unsigned Doubleword VX-form (vmaxud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMINSD, 0xfc0007ff00000000, 0x100003c200000000, 0x0, // Vector Minimum Signed Doubleword VX-form (vminsd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMINUD, 0xfc0007ff00000000, 0x100002c200000000, 0x0, // Vector Minimum Unsigned Doubleword VX-form (vminud VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMRGEW, 0xfc0007ff00000000, 0x1000078c00000000, 0x0, // Vector Merge Even Word VX-form (vmrgew VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMRGOW, 0xfc0007ff00000000, 0x1000068c00000000, 0x0, // Vector Merge Odd Word VX-form (vmrgow VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULESW, 0xfc0007ff00000000, 0x1000038800000000, 0x0, // Vector Multiply Even Signed Word VX-form (vmulesw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULEUW, 0xfc0007ff00000000, 0x1000028800000000, 0x0, // Vector Multiply Even Unsigned Word VX-form (vmuleuw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULOSW, 0xfc0007ff00000000, 0x1000018800000000, 0x0, // Vector Multiply Odd Signed Word VX-form (vmulosw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULOUW, 0xfc0007ff00000000, 0x1000008800000000, 0x0, // Vector Multiply Odd Unsigned Word VX-form (vmulouw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VMULUWM, 0xfc0007ff00000000, 0x1000008900000000, 0x0, // Vector Multiply Unsigned Word Modulo VX-form (vmuluwm VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VNAND, 0xfc0007ff00000000, 0x1000058400000000, 0x0, // Vector Logical NAND VX-form (vnand VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VNCIPHER, 0xfc0007ff00000000, 0x1000054800000000, 0x0, // Vector AES Inverse Cipher VX-form (vncipher VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VNCIPHERLAST, 0xfc0007ff00000000, 0x1000054900000000, 0x0, // Vector AES Inverse Cipher Last VX-form (vncipherlast VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VORC, 0xfc0007ff00000000, 0x1000054400000000, 0x0, // Vector Logical OR with Complement VX-form (vorc VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPERMXOR, 0xfc00003f00000000, 0x1000002d00000000, 0x0, // Vector Permute & Exclusive-OR VA-form (vpermxor VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
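| // VPERMXOR is VA-form: its mask 0xfc00003f fixes only the primary opcode |
| // (bits 0-5) and the 6-bit XO (bits 26-31), leaving bits 21-25 free for the |
| // third source register VRC. The VA-form subtract entries at the end of |
| // this group use the same layout. |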
| {VPKSDSS, 0xfc0007ff00000000, 0x100005ce00000000, 0x0, // Vector Pack Signed Doubleword Signed Saturate VX-form (vpksdss VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPKSDUS, 0xfc0007ff00000000, 0x1000054e00000000, 0x0, // Vector Pack Signed Doubleword Unsigned Saturate VX-form (vpksdus VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPKUDUM, 0xfc0007ff00000000, 0x1000044e00000000, 0x0, // Vector Pack Unsigned Doubleword Unsigned Modulo VX-form (vpkudum VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPKUDUS, 0xfc0007ff00000000, 0x100004ce00000000, 0x0, // Vector Pack Unsigned Doubleword Unsigned Saturate VX-form (vpkudus VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPMSUMB, 0xfc0007ff00000000, 0x1000040800000000, 0x0, // Vector Polynomial Multiply-Sum Byte VX-form (vpmsumb VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPMSUMD, 0xfc0007ff00000000, 0x100004c800000000, 0x0, // Vector Polynomial Multiply-Sum Doubleword VX-form (vpmsumd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPMSUMH, 0xfc0007ff00000000, 0x1000044800000000, 0x0, // Vector Polynomial Multiply-Sum Halfword VX-form (vpmsumh VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPMSUMW, 0xfc0007ff00000000, 0x1000048800000000, 0x0, // Vector Polynomial Multiply-Sum Word VX-form (vpmsumw VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VPOPCNTB, 0xfc0007ff00000000, 0x1000070300000000, 0x1f000000000000, // Vector Population Count Byte VX-form (vpopcntb VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPOPCNTD, 0xfc0007ff00000000, 0x100007c300000000, 0x1f000000000000, // Vector Population Count Doubleword VX-form (vpopcntd VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPOPCNTH, 0xfc0007ff00000000, 0x1000074300000000, 0x1f000000000000, // Vector Population Count Halfword VX-form (vpopcnth VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
| {VPOPCNTW, 0xfc0007ff00000000, 0x1000078300000000, 0x1f000000000000, // Vector Population Count Word VX-form (vpopcntw VRT,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}}, |
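| // The two-operand VX-form entries here (VCLZW, VGBBD, VPOPCNT*) set the |
| // don't-care mask 0x1f000000000000, covering bits 11-15 where VRA would |
| // otherwise sit, so a decoder can treat that field as unused when matching. |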
| {VRLD, 0xfc0007ff00000000, 0x100000c400000000, 0x0, // Vector Rotate Left Doubleword VX-form (vrld VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSBOX, 0xfc0007ff00000000, 0x100005c800000000, 0xf80000000000, // Vector AES SubBytes VX-form (vsbox VRT,VRA) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}}, |
| {VSHASIGMAD, 0xfc0007ff00000000, 0x100006c200000000, 0x0, // Vector SHA-512 Sigma Doubleword VX-form (vshasigmad VRT,VRA,ST,SIX) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}}, |
| {VSHASIGMAW, 0xfc0007ff00000000, 0x1000068200000000, 0x0, // Vector SHA-256 Sigma Word VX-form (vshasigmaw VRT,VRA,ST,SIX) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}}, |
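| // VSHASIGMAD/VSHASIGMAW take immediates rather than a second register |
| // operand: ST occupies bit 16 and SIX bits 17-20, packed into what would |
| // otherwise be the VRB field. Note also VSBOX above, whose don't-care mask |
| // 0xf80000000000 instead marks the unused VRB field (bits 16-20). |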
| {VSLD, 0xfc0007ff00000000, 0x100005c400000000, 0x0, // Vector Shift Left Doubleword VX-form (vsld VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSRAD, 0xfc0007ff00000000, 0x100003c400000000, 0x0, // Vector Shift Right Algebraic Doubleword VX-form (vsrad VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSRD, 0xfc0007ff00000000, 0x100006c400000000, 0x0, // Vector Shift Right Doubleword VX-form (vsrd VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSUBCUQ, 0xfc0007ff00000000, 0x1000054000000000, 0x0, // Vector Subtract & write Carry-out Unsigned Quadword VX-form (vsubcuq VRT,VRA,VRB) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}}, |
| {VSUBECUQ, 0xfc00003f00000000, 0x1000003f00000000, 0x0, // Vector Subtract Extended & write Carry-out Unsigned Quadword VA-form (vsubecuq VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
| {VSUBEUQM, 0xfc00003f00000000, 0x1000003e00000000, 0x0, // Vector Subtract Extended Unsigned Quadword Modulo VA-form (vsubeuqm VRT,VRA,VRB,VRC) |
| [6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}}, |
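| // A minimal sketch of how one of these entries might be consulted, assuming |
| // the field order shown above (op, mask, value, don't-care mask, args); the |
| // package's actual decode loop may differ in detail: |
| // |
| //	for _, f := range instFormats { |
| //		if enc&f.Mask != f.Value { |
| //			continue // fixed opcode/XO bits must match exactly |
| //		} |
| //		// bits under f.DontCare are unused operand fields; their |
| //		// contents can be ignored for matching purposes |
| //		for _, a := range f.Args { |
| //			if a == nil { |
| //				break // trailing arg slots are nil |
| //			} |
| //			// decode one operand from the bit range the field names, |
| //			// e.g. bits 6-10 as a vector register for ap_VecReg_6_10 |
| //		} |
| //		return f.Op // matched |
| //	} |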
|