ppc64asm,ppc64map: enable parsing of prefixed ISA 3.1 insn entries

ppc64map is modified to store a second set of Mask/Value/DontCare
fields in its internal Inst structure.  The existing fields are
used to store the prefix, and the newly added fields store similar
information about the second instruction word. It is worth noting
that prefixed instructions are 64 bits long, but are encoded like
a pair of regular (32 bit) instructions.

This requires some refactoring of ppc64map to support decoding fields
from the second instruction word.  To do this, we add a Word field
as needed to indicate the instruction word (0 == prefix, 1 == suffix),
and field names are updated to reflect bit positions as if such
instructions extended to 64 bits.

All prefixed instructions share the same primary opcode, so the
decode logic will need to be updated in a subsequent patch to choose
the correct prefixed instruction when decoding.

Note that while enabling prefix support, a few ISA bugs were found
and reported. YMSK should be 2 bits, not 4. Likewise, the nop
extended mnemonic is added as it shows up more frequently when
prefixed instructions need to be moved to avoid crossing a 64B
boundary.

Change-Id: I8fd43bbd05738e8d70267efed447022c36c4616f
Reviewed-on: https://go-review.googlesource.com/c/arch/+/307352
Run-TryBot: Carlos Eduardo Seo <carlos.seo@linaro.org>
TryBot-Result: Go Bot <gobot@golang.org>
Trust: Emmanuel Odeke <emmanuel@orijtech.com>
Reviewed-by: Lynn Boger <laboger@linux.vnet.ibm.com>
diff --git a/ppc64/pp64.csv b/ppc64/pp64.csv
index 0a10d22..6f7b7fc 100644
--- a/ppc64/pp64.csv
+++ b/ppc64/pp64.csv
@@ -66,11 +66,11 @@
 "Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form","pmxvf32gernp AT,XA,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|90@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form","pmxvf32gerpn AT,XA,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|154@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvf32gerpp AT,XA,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|26@21|AX@29|BX@30|/@31|",""
-"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form","pmxvf64ger AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|Ap@11|B@16|59@21|AX@29|BX@30|/@31|",""
-"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form","pmxvf64gernn AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|Ap@11|B@16|250@21|AX@29|BX@30|/@31|",""
-"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form","pmxvf64gernp AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|Ap@11|B@16|122@21|AX@29|BX@30|/@31|",""
-"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form","pmxvf64gerpn AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|Ap@11|B@16|186@21|AX@29|BX@30|/@31|",""
-"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvf64gerpp AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|,59@0|AT@6|//@9|Ap@11|B@16|58@21|AX@29|BX@30|/@31|",""
+"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form","pmxvf64ger AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|//@30|,59@0|AT@6|//@9|Ap@11|B@16|59@21|AX@29|BX@30|/@31|",""
+"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form","pmxvf64gernn AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|//@30|,59@0|AT@6|//@9|Ap@11|B@16|250@21|AX@29|BX@30|/@31|",""
+"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form","pmxvf64gernp AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|//@30|,59@0|AT@6|//@9|Ap@11|B@16|122@21|AX@29|BX@30|/@31|",""
+"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form","pmxvf64gerpn AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|//@30|,59@0|AT@6|//@9|Ap@11|B@16|186@21|AX@29|BX@30|/@31|",""
+"Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvf64gerpp AT,XAp,XB,XMSK,YMSK",",1@0|3@6|9@8|//@12|/@14|/@15|///@16|XMSK@24|YMSK@28|//@30|,59@0|AT@6|//@9|Ap@11|B@16|58@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) MMIRR:XX3-form","pmxvi16ger2 AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@18|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|75@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvi16ger2pp AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@18|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|107@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation MMIRR:XX3-form","pmxvi16ger2s AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@18|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|43@21|AX@29|BX@30|/@31|",""
@@ -80,7 +80,7 @@
 "Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) MMIRR:XX3-form","pmxvi8ger4 AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@20|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|3@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvi8ger4pp AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@20|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|2@21|AX@29|BX@30|/@31|",""
 "Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) with Saturate Positive multiply, Positive accumulate MMIRR:XX3-form","pmxvi8ger4spp AT,XA,XB,XMSK,YMSK,PMSK",",1@0|3@6|9@8|//@12|/@14|/@15|PMSK@16|///@20|XMSK@24|YMSK@28|,59@0|AT@6|//@9|A@11|B@16|99@21|AX@29|BX@30|/@31|",""
-"Prefixed Nop MRR:*-form","pnop",",1@0|3@6|0@8|///@12|0@14|//@31|",""
+"Prefixed Nop MRR:*-form","pnop",",1@0|3@6|0@8|///@12|0@14|//@31|,///@0|",""
 "Prefixed Store Byte MLS:D-form","pstb RS,D(RA),R",",1@0|2@6|0@8|//@9|R@11|//@12|d0@14|,38@0|RS@6|RA@11|d1@16|",""
 "Prefixed Store Doubleword 8LS:D-form","pstd RS,D(RA),R",",1@0|0@6|0@8|//@9|R@11|//@12|d0@14|,61@0|RS@6|RA@11|d1@16|",""
 "Prefixed Store Floating-Point Double MLS:D-form","pstfd FRS,D(RA),R",",1@0|2@6|0@8|//@9|R@11|//@12|d0@14|,54@0|FRS@6|RA@11|d1@16|",""
@@ -1127,7 +1127,7 @@
 "NOR X-form","nor RA,RS,RB (Rc=0)|nor. RA,RS,RB (Rc=1)","31@0|RS@6|RA@11|RB@16|124@21|Rc@31|",""
 "OR X-form","or RA,RS,RB (Rc=0)|or. RA,RS,RB (Rc=1)","31@0|RS@6|RA@11|RB@16|444@21|Rc@31|",""
 "OR with Complement X-form","orc RA,RS,RB (Rc=0)|orc. RA,RS,RB (Rc=1)","31@0|RS@6|RA@11|RB@16|412@21|Rc@31|",""
-"OR Immediate D-form","ori RA,RS,UI","24@0|RS@6|RA@11|UI@16|",""
+"OR Immediate D-form","ori RA,RS,UI|nop (RA=0 RS=0 UI=0)","24@0|RS@6|RA@11|UI@16|",""
 "OR Immediate Shifted D-form","oris RA,RS,UI","25@0|RS@6|RA@11|UI@16|",""
 "Rotate Left Word Immediate then Mask Insert M-form","rlwimi RA,RS,SH,MB,ME (Rc=0)|rlwimi. RA,RS,SH,MB,ME (Rc=1)","20@0|RS@6|RA@11|SH@16|MB@21|ME@26|Rc@31|",""
 "Rotate Left Word Immediate then AND with Mask M-form","rlwinm RA,RS,SH,MB,ME (Rc=0)|rlwinm. RA,RS,SH,MB,ME (Rc=1)","21@0|RS@6|RA@11|SH@16|MB@21|ME@26|Rc@31|",""
diff --git a/ppc64/ppc64asm/decode.go b/ppc64/ppc64asm/decode.go
index cf56648..cf1c57d 100644
--- a/ppc64/ppc64asm/decode.go
+++ b/ppc64/ppc64asm/decode.go
@@ -23,7 +23,7 @@
 	Mask     uint32
 	Value    uint32
 	DontCare uint32
-	Args     [5]*argField
+	Args     [6]*argField
 }
 
 // argField indicate how to decode an argument to an instruction.
diff --git a/ppc64/ppc64asm/field.go b/ppc64/ppc64asm/field.go
index 26a4fdf..6410089 100644
--- a/ppc64/ppc64asm/field.go
+++ b/ppc64/ppc64asm/field.go
@@ -14,6 +14,10 @@
 type BitField struct {
 	Offs uint8 // the offset of the left-most bit.
 	Bits uint8 // length in bits.
+	// The instruction word holding this field.
+	// It is always 0 for ISA < 3.1 instructions. It is
+	// in decoding order. (0 == prefix, 1 == suffix on ISA 3.1)
+	Word uint8
 }
 
 func (b BitField) String() string {
diff --git a/ppc64/ppc64asm/field_test.go b/ppc64/ppc64asm/field_test.go
index 14eb2f8..602bbcd 100644
--- a/ppc64/ppc64asm/field_test.go
+++ b/ppc64/ppc64asm/field_test.go
@@ -26,15 +26,15 @@
 		s    int32  // signed output
 		fail bool   // if the check should panic
 	}{
-		{BitField{0, 0}, 0, 0, 0, true},
-		{BitField{31, 2}, 0, 0, 0, true},
-		{BitField{31, 1}, 1, 1, -1, false},
-		{BitField{29, 2}, 0 << 1, 0, 0, false},
-		{BitField{29, 2}, 1 << 1, 1, 1, false},
-		{BitField{29, 2}, 2 << 1, 2, -2, false},
-		{BitField{29, 2}, 3 << 1, 3, -1, false},
-		{BitField{0, 32}, 1<<32 - 1, 1<<32 - 1, -1, false},
-		{BitField{16, 3}, 1 << 15, 4, -4, false},
+		{BitField{0, 0, 0}, 0, 0, 0, true},
+		{BitField{31, 2, 0}, 0, 0, 0, true},
+		{BitField{31, 1, 0}, 1, 1, -1, false},
+		{BitField{29, 2, 0}, 0 << 1, 0, 0, false},
+		{BitField{29, 2, 0}, 1 << 1, 1, 1, false},
+		{BitField{29, 2, 0}, 2 << 1, 2, -2, false},
+		{BitField{29, 2, 0}, 3 << 1, 3, -1, false},
+		{BitField{0, 32, 0}, 1<<32 - 1, 1<<32 - 1, -1, false},
+		{BitField{16, 3, 0}, 1 << 15, 4, -4, false},
 	}
 	for i, tst := range tests {
 		var (
diff --git a/ppc64/ppc64asm/tables.go b/ppc64/ppc64asm/tables.go
index eaa6cb9..b4065a8 100644
--- a/ppc64/ppc64asm/tables.go
+++ b/ppc64/ppc64asm/tables.go
@@ -1,5 +1,5 @@
 // DO NOT EDIT
-// generated by: ppc64map -fmt=decoder pp64.csv
+// generated by: ppc64map -fmt=decoder ../pp64.csv
 
 package ppc64asm
 
@@ -26,8 +26,63 @@
 	MTVSRHM
 	MTVSRQM
 	MTVSRWM
+	PADDI
 	PDEPD
 	PEXTD
+	PLBZ
+	PLD
+	PLFD
+	PLFS
+	PLHA
+	PLHZ
+	PLQ
+	PLWA
+	PLWZ
+	PLXSD
+	PLXSSP
+	PLXV
+	PLXVP
+	PMXVBF16GER2
+	PMXVBF16GER2NN
+	PMXVBF16GER2NP
+	PMXVBF16GER2PN
+	PMXVBF16GER2PP
+	PMXVF16GER2
+	PMXVF16GER2NN
+	PMXVF16GER2NP
+	PMXVF16GER2PN
+	PMXVF16GER2PP
+	PMXVF32GER
+	PMXVF32GERNN
+	PMXVF32GERNP
+	PMXVF32GERPN
+	PMXVF32GERPP
+	PMXVF64GER
+	PMXVF64GERNN
+	PMXVF64GERNP
+	PMXVF64GERPN
+	PMXVF64GERPP
+	PMXVI16GER2
+	PMXVI16GER2PP
+	PMXVI16GER2S
+	PMXVI16GER2SPP
+	PMXVI4GER8
+	PMXVI4GER8PP
+	PMXVI8GER4
+	PMXVI8GER4PP
+	PMXVI8GER4SPP
+	PNOP
+	PSTB
+	PSTD
+	PSTFD
+	PSTFS
+	PSTH
+	PSTQ
+	PSTW
+	PSTXSD
+	PSTXSSP
+	PSTXV
+	PSTXVP
 	SETBC
 	SETBCR
 	SETNBC
@@ -178,13 +233,22 @@
 	XVI8GER4PP
 	XVI8GER4SPP
 	XVTLSBB
+	XXBLENDVB
+	XXBLENDVD
+	XXBLENDVH
+	XXBLENDVW
+	XXEVAL
 	XXGENPCVBM
 	XXGENPCVDM
 	XXGENPCVHM
 	XXGENPCVWM
 	XXMFACC
 	XXMTACC
+	XXPERMX
 	XXSETACCZ
+	XXSPLTI32DX
+	XXSPLTIDP
+	XXSPLTIW
 	MSGCLRU
 	MSGSNDU
 	URFID
@@ -1287,6 +1351,7 @@
 	ORCC
 	ORC
 	ORCCC
+	NOP
 	ORI
 	ORIS
 	RLWIMI
@@ -1356,4134 +1421,4352 @@
 )
 
 var opstr = [...]string{
-	BRD:          "brd",
-	BRH:          "brh",
-	BRW:          "brw",
-	CFUGED:       "cfuged",
-	CNTLZDM:      "cntlzdm",
-	CNTTZDM:      "cnttzdm",
-	DCFFIXQQ:     "dcffixqq",
-	DCTFIXQQ:     "dctfixqq",
-	LXVKQ:        "lxvkq",
-	LXVP:         "lxvp",
-	LXVPX:        "lxvpx",
-	LXVRBX:       "lxvrbx",
-	LXVRDX:       "lxvrdx",
-	LXVRHX:       "lxvrhx",
-	LXVRWX:       "lxvrwx",
-	MTVSRBM:      "mtvsrbm",
-	MTVSRBMI:     "mtvsrbmi",
-	MTVSRDM:      "mtvsrdm",
-	MTVSRHM:      "mtvsrhm",
-	MTVSRQM:      "mtvsrqm",
-	MTVSRWM:      "mtvsrwm",
-	PDEPD:        "pdepd",
-	PEXTD:        "pextd",
-	SETBC:        "setbc",
-	SETBCR:       "setbcr",
-	SETNBC:       "setnbc",
-	SETNBCR:      "setnbcr",
-	STXVP:        "stxvp",
-	STXVPX:       "stxvpx",
-	STXVRBX:      "stxvrbx",
-	STXVRDX:      "stxvrdx",
-	STXVRHX:      "stxvrhx",
-	STXVRWX:      "stxvrwx",
-	VCFUGED:      "vcfuged",
-	VCLRLB:       "vclrlb",
-	VCLRRB:       "vclrrb",
-	VCLZDM:       "vclzdm",
-	VCMPEQUQ:     "vcmpequq",
-	VCMPEQUQCC:   "vcmpequq.",
-	VCMPGTSQ:     "vcmpgtsq",
-	VCMPGTSQCC:   "vcmpgtsq.",
-	VCMPGTUQ:     "vcmpgtuq",
-	VCMPGTUQCC:   "vcmpgtuq.",
-	VCMPSQ:       "vcmpsq",
-	VCMPUQ:       "vcmpuq",
-	VCNTMBB:      "vcntmbb",
-	VCNTMBD:      "vcntmbd",
-	VCNTMBH:      "vcntmbh",
-	VCNTMBW:      "vcntmbw",
-	VCTZDM:       "vctzdm",
-	VDIVESD:      "vdivesd",
-	VDIVESQ:      "vdivesq",
-	VDIVESW:      "vdivesw",
-	VDIVEUD:      "vdiveud",
-	VDIVEUQ:      "vdiveuq",
-	VDIVEUW:      "vdiveuw",
-	VDIVSD:       "vdivsd",
-	VDIVSQ:       "vdivsq",
-	VDIVSW:       "vdivsw",
-	VDIVUD:       "vdivud",
-	VDIVUQ:       "vdivuq",
-	VDIVUW:       "vdivuw",
-	VEXPANDBM:    "vexpandbm",
-	VEXPANDDM:    "vexpanddm",
-	VEXPANDHM:    "vexpandhm",
-	VEXPANDQM:    "vexpandqm",
-	VEXPANDWM:    "vexpandwm",
-	VEXTDDVLX:    "vextddvlx",
-	VEXTDDVRX:    "vextddvrx",
-	VEXTDUBVLX:   "vextdubvlx",
-	VEXTDUBVRX:   "vextdubvrx",
-	VEXTDUHVLX:   "vextduhvlx",
-	VEXTDUHVRX:   "vextduhvrx",
-	VEXTDUWVLX:   "vextduwvlx",
-	VEXTDUWVRX:   "vextduwvrx",
-	VEXTRACTBM:   "vextractbm",
-	VEXTRACTDM:   "vextractdm",
-	VEXTRACTHM:   "vextracthm",
-	VEXTRACTQM:   "vextractqm",
-	VEXTRACTWM:   "vextractwm",
-	VEXTSD2Q:     "vextsd2q",
-	VGNB:         "vgnb",
-	VINSBLX:      "vinsblx",
-	VINSBRX:      "vinsbrx",
-	VINSBVLX:     "vinsbvlx",
-	VINSBVRX:     "vinsbvrx",
-	VINSD:        "vinsd",
-	VINSDLX:      "vinsdlx",
-	VINSDRX:      "vinsdrx",
-	VINSHLX:      "vinshlx",
-	VINSHRX:      "vinshrx",
-	VINSHVLX:     "vinshvlx",
-	VINSHVRX:     "vinshvrx",
-	VINSW:        "vinsw",
-	VINSWLX:      "vinswlx",
-	VINSWRX:      "vinswrx",
-	VINSWVLX:     "vinswvlx",
-	VINSWVRX:     "vinswvrx",
-	VMODSD:       "vmodsd",
-	VMODSQ:       "vmodsq",
-	VMODSW:       "vmodsw",
-	VMODUD:       "vmodud",
-	VMODUQ:       "vmoduq",
-	VMODUW:       "vmoduw",
-	VMSUMCUD:     "vmsumcud",
-	VMULESD:      "vmulesd",
-	VMULEUD:      "vmuleud",
-	VMULHSD:      "vmulhsd",
-	VMULHSW:      "vmulhsw",
-	VMULHUD:      "vmulhud",
-	VMULHUW:      "vmulhuw",
-	VMULLD:       "vmulld",
-	VMULOSD:      "vmulosd",
-	VMULOUD:      "vmuloud",
-	VPDEPD:       "vpdepd",
-	VPEXTD:       "vpextd",
-	VRLQ:         "vrlq",
-	VRLQMI:       "vrlqmi",
-	VRLQNM:       "vrlqnm",
-	VSLDBI:       "vsldbi",
-	VSLQ:         "vslq",
-	VSRAQ:        "vsraq",
-	VSRDBI:       "vsrdbi",
-	VSRQ:         "vsrq",
-	VSTRIBL:      "vstribl",
-	VSTRIBLCC:    "vstribl.",
-	VSTRIBR:      "vstribr",
-	VSTRIBRCC:    "vstribr.",
-	VSTRIHL:      "vstrihl",
-	VSTRIHLCC:    "vstrihl.",
-	VSTRIHR:      "vstrihr",
-	VSTRIHRCC:    "vstrihr.",
-	XSCMPEQQP:    "xscmpeqqp",
-	XSCMPGEQP:    "xscmpgeqp",
-	XSCMPGTQP:    "xscmpgtqp",
-	XSCVQPSQZ:    "xscvqpsqz",
-	XSCVQPUQZ:    "xscvqpuqz",
-	XSCVSQQP:     "xscvsqqp",
-	XSCVUQQP:     "xscvuqqp",
-	XSMAXCQP:     "xsmaxcqp",
-	XSMINCQP:     "xsmincqp",
-	XVBF16GER2:   "xvbf16ger2",
-	XVBF16GER2NN: "xvbf16ger2nn",
-	XVBF16GER2NP: "xvbf16ger2np",
-	XVBF16GER2PN: "xvbf16ger2pn",
-	XVBF16GER2PP: "xvbf16ger2pp",
-	XVCVBF16SPN:  "xvcvbf16spn",
-	XVCVSPBF16:   "xvcvspbf16",
-	XVF16GER2:    "xvf16ger2",
-	XVF16GER2NN:  "xvf16ger2nn",
-	XVF16GER2NP:  "xvf16ger2np",
-	XVF16GER2PN:  "xvf16ger2pn",
-	XVF16GER2PP:  "xvf16ger2pp",
-	XVF32GER:     "xvf32ger",
-	XVF32GERNN:   "xvf32gernn",
-	XVF32GERNP:   "xvf32gernp",
-	XVF32GERPN:   "xvf32gerpn",
-	XVF32GERPP:   "xvf32gerpp",
-	XVF64GER:     "xvf64ger",
-	XVF64GERNN:   "xvf64gernn",
-	XVF64GERNP:   "xvf64gernp",
-	XVF64GERPN:   "xvf64gerpn",
-	XVF64GERPP:   "xvf64gerpp",
-	XVI16GER2:    "xvi16ger2",
-	XVI16GER2PP:  "xvi16ger2pp",
-	XVI16GER2S:   "xvi16ger2s",
-	XVI16GER2SPP: "xvi16ger2spp",
-	XVI4GER8:     "xvi4ger8",
-	XVI4GER8PP:   "xvi4ger8pp",
-	XVI8GER4:     "xvi8ger4",
-	XVI8GER4PP:   "xvi8ger4pp",
-	XVI8GER4SPP:  "xvi8ger4spp",
-	XVTLSBB:      "xvtlsbb",
-	XXGENPCVBM:   "xxgenpcvbm",
-	XXGENPCVDM:   "xxgenpcvdm",
-	XXGENPCVHM:   "xxgenpcvhm",
-	XXGENPCVWM:   "xxgenpcvwm",
-	XXMFACC:      "xxmfacc",
-	XXMTACC:      "xxmtacc",
-	XXSETACCZ:    "xxsetaccz",
-	MSGCLRU:      "msgclru",
-	MSGSNDU:      "msgsndu",
-	URFID:        "urfid",
-	ADDEX:        "addex",
-	MFFSCDRN:     "mffscdrn",
-	MFFSCDRNI:    "mffscdrni",
-	MFFSCE:       "mffsce",
-	MFFSCRN:      "mffscrn",
-	MFFSCRNI:     "mffscrni",
-	MFFSL:        "mffsl",
-	SLBIAG:       "slbiag",
-	VMSUMUDM:     "vmsumudm",
-	ADDPCIS:      "addpcis",
-	BCDCFNCC:     "bcdcfn.",
-	BCDCFSQCC:    "bcdcfsq.",
-	BCDCFZCC:     "bcdcfz.",
-	BCDCPSGNCC:   "bcdcpsgn.",
-	BCDCTNCC:     "bcdctn.",
-	BCDCTSQCC:    "bcdctsq.",
-	BCDCTZCC:     "bcdctz.",
-	BCDSCC:       "bcds.",
-	BCDSETSGNCC:  "bcdsetsgn.",
-	BCDSRCC:      "bcdsr.",
-	BCDTRUNCCC:   "bcdtrunc.",
-	BCDUSCC:      "bcdus.",
-	BCDUTRUNCCC:  "bcdutrunc.",
-	CMPEQB:       "cmpeqb",
-	CMPRB:        "cmprb",
-	CNTTZD:       "cnttzd",
-	CNTTZDCC:     "cnttzd.",
-	CNTTZW:       "cnttzw",
-	CNTTZWCC:     "cnttzw.",
-	COPY:         "copy",
-	CPABORT:      "cpabort",
-	DARN:         "darn",
-	DTSTSFI:      "dtstsfi",
-	DTSTSFIQ:     "dtstsfiq",
-	EXTSWSLI:     "extswsli",
-	EXTSWSLICC:   "extswsli.",
-	LDAT:         "ldat",
-	LWAT:         "lwat",
-	LXSD:         "lxsd",
-	LXSIBZX:      "lxsibzx",
-	LXSIHZX:      "lxsihzx",
-	LXSSP:        "lxssp",
-	LXV:          "lxv",
-	LXVB16X:      "lxvb16x",
-	LXVH8X:       "lxvh8x",
-	LXVL:         "lxvl",
-	LXVLL:        "lxvll",
-	LXVWSX:       "lxvwsx",
-	LXVX:         "lxvx",
-	MADDHD:       "maddhd",
-	MADDHDU:      "maddhdu",
-	MADDLD:       "maddld",
-	MCRXRX:       "mcrxrx",
-	MFVSRLD:      "mfvsrld",
-	MODSD:        "modsd",
-	MODSW:        "modsw",
-	MODUD:        "modud",
-	MODUW:        "moduw",
-	MSGSYNC:      "msgsync",
-	MTVSRDD:      "mtvsrdd",
-	MTVSRWS:      "mtvsrws",
-	PASTECC:      "paste.",
-	SETB:         "setb",
-	SLBIEG:       "slbieg",
-	SLBSYNC:      "slbsync",
-	STDAT:        "stdat",
-	STOP:         "stop",
-	STWAT:        "stwat",
-	STXSD:        "stxsd",
-	STXSIBX:      "stxsibx",
-	STXSIHX:      "stxsihx",
-	STXSSP:       "stxssp",
-	STXV:         "stxv",
-	STXVB16X:     "stxvb16x",
-	STXVH8X:      "stxvh8x",
-	STXVL:        "stxvl",
-	STXVLL:       "stxvll",
-	STXVX:        "stxvx",
-	VABSDUB:      "vabsdub",
-	VABSDUH:      "vabsduh",
-	VABSDUW:      "vabsduw",
-	VBPERMD:      "vbpermd",
-	VCLZLSBB:     "vclzlsbb",
-	VCMPNEB:      "vcmpneb",
-	VCMPNEBCC:    "vcmpneb.",
-	VCMPNEH:      "vcmpneh",
-	VCMPNEHCC:    "vcmpneh.",
-	VCMPNEW:      "vcmpnew",
-	VCMPNEWCC:    "vcmpnew.",
-	VCMPNEZB:     "vcmpnezb",
-	VCMPNEZBCC:   "vcmpnezb.",
-	VCMPNEZH:     "vcmpnezh",
-	VCMPNEZHCC:   "vcmpnezh.",
-	VCMPNEZW:     "vcmpnezw",
-	VCMPNEZWCC:   "vcmpnezw.",
-	VCTZB:        "vctzb",
-	VCTZD:        "vctzd",
-	VCTZH:        "vctzh",
-	VCTZLSBB:     "vctzlsbb",
-	VCTZW:        "vctzw",
-	VEXTRACTD:    "vextractd",
-	VEXTRACTUB:   "vextractub",
-	VEXTRACTUH:   "vextractuh",
-	VEXTRACTUW:   "vextractuw",
-	VEXTSB2D:     "vextsb2d",
-	VEXTSB2W:     "vextsb2w",
-	VEXTSH2D:     "vextsh2d",
-	VEXTSH2W:     "vextsh2w",
-	VEXTSW2D:     "vextsw2d",
-	VEXTUBLX:     "vextublx",
-	VEXTUBRX:     "vextubrx",
-	VEXTUHLX:     "vextuhlx",
-	VEXTUHRX:     "vextuhrx",
-	VEXTUWLX:     "vextuwlx",
-	VEXTUWRX:     "vextuwrx",
-	VINSERTB:     "vinsertb",
-	VINSERTD:     "vinsertd",
-	VINSERTH:     "vinserth",
-	VINSERTW:     "vinsertw",
-	VMUL10CUQ:    "vmul10cuq",
-	VMUL10ECUQ:   "vmul10ecuq",
-	VMUL10EUQ:    "vmul10euq",
-	VMUL10UQ:     "vmul10uq",
-	VNEGD:        "vnegd",
-	VNEGW:        "vnegw",
-	VPERMR:       "vpermr",
-	VPRTYBD:      "vprtybd",
-	VPRTYBQ:      "vprtybq",
-	VPRTYBW:      "vprtybw",
-	VRLDMI:       "vrldmi",
-	VRLDNM:       "vrldnm",
-	VRLWMI:       "vrlwmi",
-	VRLWNM:       "vrlwnm",
-	VSLV:         "vslv",
-	VSRV:         "vsrv",
-	WAIT:         "wait",
-	XSABSQP:      "xsabsqp",
-	XSADDQP:      "xsaddqp",
-	XSADDQPO:     "xsaddqpo",
-	XSCMPEQDP:    "xscmpeqdp",
-	XSCMPEXPDP:   "xscmpexpdp",
-	XSCMPEXPQP:   "xscmpexpqp",
-	XSCMPGEDP:    "xscmpgedp",
-	XSCMPGTDP:    "xscmpgtdp",
-	XSCMPOQP:     "xscmpoqp",
-	XSCMPUQP:     "xscmpuqp",
-	XSCPSGNQP:    "xscpsgnqp",
-	XSCVDPHP:     "xscvdphp",
-	XSCVDPQP:     "xscvdpqp",
-	XSCVHPDP:     "xscvhpdp",
-	XSCVQPDP:     "xscvqpdp",
-	XSCVQPDPO:    "xscvqpdpo",
-	XSCVQPSDZ:    "xscvqpsdz",
-	XSCVQPSWZ:    "xscvqpswz",
-	XSCVQPUDZ:    "xscvqpudz",
-	XSCVQPUWZ:    "xscvqpuwz",
-	XSCVSDQP:     "xscvsdqp",
-	XSCVUDQP:     "xscvudqp",
-	XSDIVQP:      "xsdivqp",
-	XSDIVQPO:     "xsdivqpo",
-	XSIEXPDP:     "xsiexpdp",
-	XSIEXPQP:     "xsiexpqp",
-	XSMADDQP:     "xsmaddqp",
-	XSMADDQPO:    "xsmaddqpo",
-	XSMAXCDP:     "xsmaxcdp",
-	XSMAXJDP:     "xsmaxjdp",
-	XSMINCDP:     "xsmincdp",
-	XSMINJDP:     "xsminjdp",
-	XSMSUBQP:     "xsmsubqp",
-	XSMSUBQPO:    "xsmsubqpo",
-	XSMULQP:      "xsmulqp",
-	XSMULQPO:     "xsmulqpo",
-	XSNABSQP:     "xsnabsqp",
-	XSNEGQP:      "xsnegqp",
-	XSNMADDQP:    "xsnmaddqp",
-	XSNMADDQPO:   "xsnmaddqpo",
-	XSNMSUBQP:    "xsnmsubqp",
-	XSNMSUBQPO:   "xsnmsubqpo",
-	XSRQPI:       "xsrqpi",
-	XSRQPIX:      "xsrqpix",
-	XSRQPXP:      "xsrqpxp",
-	XSSQRTQP:     "xssqrtqp",
-	XSSQRTQPO:    "xssqrtqpo",
-	XSSUBQP:      "xssubqp",
-	XSSUBQPO:     "xssubqpo",
-	XSTSTDCDP:    "xststdcdp",
-	XSTSTDCQP:    "xststdcqp",
-	XSTSTDCSP:    "xststdcsp",
-	XSXEXPDP:     "xsxexpdp",
-	XSXEXPQP:     "xsxexpqp",
-	XSXSIGDP:     "xsxsigdp",
-	XSXSIGQP:     "xsxsigqp",
-	XVCVHPSP:     "xvcvhpsp",
-	XVCVSPHP:     "xvcvsphp",
-	XVIEXPDP:     "xviexpdp",
-	XVIEXPSP:     "xviexpsp",
-	XVTSTDCDP:    "xvtstdcdp",
-	XVTSTDCSP:    "xvtstdcsp",
-	XVXEXPDP:     "xvxexpdp",
-	XVXEXPSP:     "xvxexpsp",
-	XVXSIGDP:     "xvxsigdp",
-	XVXSIGSP:     "xvxsigsp",
-	XXBRD:        "xxbrd",
-	XXBRH:        "xxbrh",
-	XXBRQ:        "xxbrq",
-	XXBRW:        "xxbrw",
-	XXEXTRACTUW:  "xxextractuw",
-	XXINSERTW:    "xxinsertw",
-	XXPERM:       "xxperm",
-	XXPERMR:      "xxpermr",
-	XXSPLTIB:     "xxspltib",
-	BCDADDCC:     "bcdadd.",
-	BCDSUBCC:     "bcdsub.",
-	BCTAR:        "bctar",
-	BCTARL:       "bctarl",
-	CLRBHRB:      "clrbhrb",
-	FMRGEW:       "fmrgew",
-	FMRGOW:       "fmrgow",
-	ICBT:         "icbt",
-	LQARX:        "lqarx",
-	LXSIWAX:      "lxsiwax",
-	LXSIWZX:      "lxsiwzx",
-	LXSSPX:       "lxsspx",
-	MFBHRBE:      "mfbhrbe",
-	MFVSRD:       "mfvsrd",
-	MFVSRWZ:      "mfvsrwz",
-	MSGCLR:       "msgclr",
-	MSGCLRP:      "msgclrp",
-	MSGSND:       "msgsnd",
-	MSGSNDP:      "msgsndp",
-	MTVSRD:       "mtvsrd",
-	MTVSRWA:      "mtvsrwa",
-	MTVSRWZ:      "mtvsrwz",
-	RFEBB:        "rfebb",
-	STQCXCC:      "stqcx.",
-	STXSIWX:      "stxsiwx",
-	STXSSPX:      "stxsspx",
-	VADDCUQ:      "vaddcuq",
-	VADDECUQ:     "vaddecuq",
-	VADDEUQM:     "vaddeuqm",
-	VADDUDM:      "vaddudm",
-	VADDUQM:      "vadduqm",
-	VBPERMQ:      "vbpermq",
-	VCIPHER:      "vcipher",
-	VCIPHERLAST:  "vcipherlast",
-	VCLZB:        "vclzb",
-	VCLZD:        "vclzd",
-	VCLZH:        "vclzh",
-	VCLZW:        "vclzw",
-	VCMPEQUD:     "vcmpequd",
-	VCMPEQUDCC:   "vcmpequd.",
-	VCMPGTSD:     "vcmpgtsd",
-	VCMPGTSDCC:   "vcmpgtsd.",
-	VCMPGTUD:     "vcmpgtud",
-	VCMPGTUDCC:   "vcmpgtud.",
-	VEQV:         "veqv",
-	VGBBD:        "vgbbd",
-	VMAXSD:       "vmaxsd",
-	VMAXUD:       "vmaxud",
-	VMINSD:       "vminsd",
-	VMINUD:       "vminud",
-	VMRGEW:       "vmrgew",
-	VMRGOW:       "vmrgow",
-	VMULESW:      "vmulesw",
-	VMULEUW:      "vmuleuw",
-	VMULOSW:      "vmulosw",
-	VMULOUW:      "vmulouw",
-	VMULUWM:      "vmuluwm",
-	VNAND:        "vnand",
-	VNCIPHER:     "vncipher",
-	VNCIPHERLAST: "vncipherlast",
-	VORC:         "vorc",
-	VPERMXOR:     "vpermxor",
-	VPKSDSS:      "vpksdss",
-	VPKSDUS:      "vpksdus",
-	VPKUDUM:      "vpkudum",
-	VPKUDUS:      "vpkudus",
-	VPMSUMB:      "vpmsumb",
-	VPMSUMD:      "vpmsumd",
-	VPMSUMH:      "vpmsumh",
-	VPMSUMW:      "vpmsumw",
-	VPOPCNTB:     "vpopcntb",
-	VPOPCNTD:     "vpopcntd",
-	VPOPCNTH:     "vpopcnth",
-	VPOPCNTW:     "vpopcntw",
-	VRLD:         "vrld",
-	VSBOX:        "vsbox",
-	VSHASIGMAD:   "vshasigmad",
-	VSHASIGMAW:   "vshasigmaw",
-	VSLD:         "vsld",
-	VSRAD:        "vsrad",
-	VSRD:         "vsrd",
-	VSUBCUQ:      "vsubcuq",
-	VSUBECUQ:     "vsubecuq",
-	VSUBEUQM:     "vsubeuqm",
-	VSUBUDM:      "vsubudm",
-	VSUBUQM:      "vsubuqm",
-	VUPKHSW:      "vupkhsw",
-	VUPKLSW:      "vupklsw",
-	XSADDSP:      "xsaddsp",
-	XSCVDPSPN:    "xscvdpspn",
-	XSCVSPDPN:    "xscvspdpn",
-	XSCVSXDSP:    "xscvsxdsp",
-	XSCVUXDSP:    "xscvuxdsp",
-	XSDIVSP:      "xsdivsp",
-	XSMADDASP:    "xsmaddasp",
-	XSMADDMSP:    "xsmaddmsp",
-	XSMSUBASP:    "xsmsubasp",
-	XSMSUBMSP:    "xsmsubmsp",
-	XSMULSP:      "xsmulsp",
-	XSNMADDASP:   "xsnmaddasp",
-	XSNMADDMSP:   "xsnmaddmsp",
-	XSNMSUBASP:   "xsnmsubasp",
-	XSNMSUBMSP:   "xsnmsubmsp",
-	XSRESP:       "xsresp",
-	XSRSP:        "xsrsp",
-	XSRSQRTESP:   "xsrsqrtesp",
-	XSSQRTSP:     "xssqrtsp",
-	XSSUBSP:      "xssubsp",
-	XXLEQV:       "xxleqv",
-	XXLNAND:      "xxlnand",
-	XXLORC:       "xxlorc",
-	ADDG6S:       "addg6s",
-	BPERMD:       "bpermd",
-	CBCDTD:       "cbcdtd",
-	CDTBCD:       "cdtbcd",
-	DCFFIX:       "dcffix",
-	DCFFIXCC:     "dcffix.",
-	DIVDE:        "divde",
-	DIVDECC:      "divde.",
-	DIVDEO:       "divdeo",
-	DIVDEOCC:     "divdeo.",
-	DIVDEU:       "divdeu",
-	DIVDEUCC:     "divdeu.",
-	DIVDEUO:      "divdeuo",
-	DIVDEUOCC:    "divdeuo.",
-	DIVWE:        "divwe",
-	DIVWECC:      "divwe.",
-	DIVWEO:       "divweo",
-	DIVWEOCC:     "divweo.",
-	DIVWEU:       "divweu",
-	DIVWEUCC:     "divweu.",
-	DIVWEUO:      "divweuo",
-	DIVWEUOCC:    "divweuo.",
-	FCFIDS:       "fcfids",
-	FCFIDSCC:     "fcfids.",
-	FCFIDU:       "fcfidu",
-	FCFIDUCC:     "fcfidu.",
-	FCFIDUS:      "fcfidus",
-	FCFIDUSCC:    "fcfidus.",
-	FCTIDU:       "fctidu",
-	FCTIDUCC:     "fctidu.",
-	FCTIDUZ:      "fctiduz",
-	FCTIDUZCC:    "fctiduz.",
-	FCTIWU:       "fctiwu",
-	FCTIWUCC:     "fctiwu.",
-	FCTIWUZ:      "fctiwuz",
-	FCTIWUZCC:    "fctiwuz.",
-	FTDIV:        "ftdiv",
-	FTSQRT:       "ftsqrt",
-	LBARX:        "lbarx",
-	LDBRX:        "ldbrx",
-	LFIWZX:       "lfiwzx",
-	LHARX:        "lharx",
-	LXSDX:        "lxsdx",
-	LXVD2X:       "lxvd2x",
-	LXVDSX:       "lxvdsx",
-	LXVW4X:       "lxvw4x",
-	POPCNTD:      "popcntd",
-	POPCNTW:      "popcntw",
-	STBCXCC:      "stbcx.",
-	STDBRX:       "stdbrx",
-	STHCXCC:      "sthcx.",
-	STXSDX:       "stxsdx",
-	STXVD2X:      "stxvd2x",
-	STXVW4X:      "stxvw4x",
-	XSABSDP:      "xsabsdp",
-	XSADDDP:      "xsadddp",
-	XSCMPODP:     "xscmpodp",
-	XSCMPUDP:     "xscmpudp",
-	XSCPSGNDP:    "xscpsgndp",
-	XSCVDPSP:     "xscvdpsp",
-	XSCVDPSXDS:   "xscvdpsxds",
-	XSCVDPSXWS:   "xscvdpsxws",
-	XSCVDPUXDS:   "xscvdpuxds",
-	XSCVDPUXWS:   "xscvdpuxws",
-	XSCVSPDP:     "xscvspdp",
-	XSCVSXDDP:    "xscvsxddp",
-	XSCVUXDDP:    "xscvuxddp",
-	XSDIVDP:      "xsdivdp",
-	XSMADDADP:    "xsmaddadp",
-	XSMADDMDP:    "xsmaddmdp",
-	XSMAXDP:      "xsmaxdp",
-	XSMINDP:      "xsmindp",
-	XSMSUBADP:    "xsmsubadp",
-	XSMSUBMDP:    "xsmsubmdp",
-	XSMULDP:      "xsmuldp",
-	XSNABSDP:     "xsnabsdp",
-	XSNEGDP:      "xsnegdp",
-	XSNMADDADP:   "xsnmaddadp",
-	XSNMADDMDP:   "xsnmaddmdp",
-	XSNMSUBADP:   "xsnmsubadp",
-	XSNMSUBMDP:   "xsnmsubmdp",
-	XSRDPI:       "xsrdpi",
-	XSRDPIC:      "xsrdpic",
-	XSRDPIM:      "xsrdpim",
-	XSRDPIP:      "xsrdpip",
-	XSRDPIZ:      "xsrdpiz",
-	XSREDP:       "xsredp",
-	XSRSQRTEDP:   "xsrsqrtedp",
-	XSSQRTDP:     "xssqrtdp",
-	XSSUBDP:      "xssubdp",
-	XSTDIVDP:     "xstdivdp",
-	XSTSQRTDP:    "xstsqrtdp",
-	XVABSDP:      "xvabsdp",
-	XVABSSP:      "xvabssp",
-	XVADDDP:      "xvadddp",
-	XVADDSP:      "xvaddsp",
-	XVCMPEQDP:    "xvcmpeqdp",
-	XVCMPEQDPCC:  "xvcmpeqdp.",
-	XVCMPEQSP:    "xvcmpeqsp",
-	XVCMPEQSPCC:  "xvcmpeqsp.",
-	XVCMPGEDP:    "xvcmpgedp",
-	XVCMPGEDPCC:  "xvcmpgedp.",
-	XVCMPGESP:    "xvcmpgesp",
-	XVCMPGESPCC:  "xvcmpgesp.",
-	XVCMPGTDP:    "xvcmpgtdp",
-	XVCMPGTDPCC:  "xvcmpgtdp.",
-	XVCMPGTSP:    "xvcmpgtsp",
-	XVCMPGTSPCC:  "xvcmpgtsp.",
-	XVCPSGNDP:    "xvcpsgndp",
-	XVCPSGNSP:    "xvcpsgnsp",
-	XVCVDPSP:     "xvcvdpsp",
-	XVCVDPSXDS:   "xvcvdpsxds",
-	XVCVDPSXWS:   "xvcvdpsxws",
-	XVCVDPUXDS:   "xvcvdpuxds",
-	XVCVDPUXWS:   "xvcvdpuxws",
-	XVCVSPDP:     "xvcvspdp",
-	XVCVSPSXDS:   "xvcvspsxds",
-	XVCVSPSXWS:   "xvcvspsxws",
-	XVCVSPUXDS:   "xvcvspuxds",
-	XVCVSPUXWS:   "xvcvspuxws",
-	XVCVSXDDP:    "xvcvsxddp",
-	XVCVSXDSP:    "xvcvsxdsp",
-	XVCVSXWDP:    "xvcvsxwdp",
-	XVCVSXWSP:    "xvcvsxwsp",
-	XVCVUXDDP:    "xvcvuxddp",
-	XVCVUXDSP:    "xvcvuxdsp",
-	XVCVUXWDP:    "xvcvuxwdp",
-	XVCVUXWSP:    "xvcvuxwsp",
-	XVDIVDP:      "xvdivdp",
-	XVDIVSP:      "xvdivsp",
-	XVMADDADP:    "xvmaddadp",
-	XVMADDASP:    "xvmaddasp",
-	XVMADDMDP:    "xvmaddmdp",
-	XVMADDMSP:    "xvmaddmsp",
-	XVMAXDP:      "xvmaxdp",
-	XVMAXSP:      "xvmaxsp",
-	XVMINDP:      "xvmindp",
-	XVMINSP:      "xvminsp",
-	XVMSUBADP:    "xvmsubadp",
-	XVMSUBASP:    "xvmsubasp",
-	XVMSUBMDP:    "xvmsubmdp",
-	XVMSUBMSP:    "xvmsubmsp",
-	XVMULDP:      "xvmuldp",
-	XVMULSP:      "xvmulsp",
-	XVNABSDP:     "xvnabsdp",
-	XVNABSSP:     "xvnabssp",
-	XVNEGDP:      "xvnegdp",
-	XVNEGSP:      "xvnegsp",
-	XVNMADDADP:   "xvnmaddadp",
-	XVNMADDASP:   "xvnmaddasp",
-	XVNMADDMDP:   "xvnmaddmdp",
-	XVNMADDMSP:   "xvnmaddmsp",
-	XVNMSUBADP:   "xvnmsubadp",
-	XVNMSUBASP:   "xvnmsubasp",
-	XVNMSUBMDP:   "xvnmsubmdp",
-	XVNMSUBMSP:   "xvnmsubmsp",
-	XVRDPI:       "xvrdpi",
-	XVRDPIC:      "xvrdpic",
-	XVRDPIM:      "xvrdpim",
-	XVRDPIP:      "xvrdpip",
-	XVRDPIZ:      "xvrdpiz",
-	XVREDP:       "xvredp",
-	XVRESP:       "xvresp",
-	XVRSPI:       "xvrspi",
-	XVRSPIC:      "xvrspic",
-	XVRSPIM:      "xvrspim",
-	XVRSPIP:      "xvrspip",
-	XVRSPIZ:      "xvrspiz",
-	XVRSQRTEDP:   "xvrsqrtedp",
-	XVRSQRTESP:   "xvrsqrtesp",
-	XVSQRTDP:     "xvsqrtdp",
-	XVSQRTSP:     "xvsqrtsp",
-	XVSUBDP:      "xvsubdp",
-	XVSUBSP:      "xvsubsp",
-	XVTDIVDP:     "xvtdivdp",
-	XVTDIVSP:     "xvtdivsp",
-	XVTSQRTDP:    "xvtsqrtdp",
-	XVTSQRTSP:    "xvtsqrtsp",
-	XXLAND:       "xxland",
-	XXLANDC:      "xxlandc",
-	XXLNOR:       "xxlnor",
-	XXLOR:        "xxlor",
-	XXLXOR:       "xxlxor",
-	XXMRGHW:      "xxmrghw",
-	XXMRGLW:      "xxmrglw",
-	XXPERMDI:     "xxpermdi",
-	XXSEL:        "xxsel",
-	XXSLDWI:      "xxsldwi",
-	XXSPLTW:      "xxspltw",
-	CMPB:         "cmpb",
-	DADD:         "dadd",
-	DADDCC:       "dadd.",
-	DADDQ:        "daddq",
-	DADDQCC:      "daddq.",
-	DCFFIXQ:      "dcffixq",
-	DCFFIXQCC:    "dcffixq.",
-	DCMPO:        "dcmpo",
-	DCMPOQ:       "dcmpoq",
-	DCMPU:        "dcmpu",
-	DCMPUQ:       "dcmpuq",
-	DCTDP:        "dctdp",
-	DCTDPCC:      "dctdp.",
-	DCTFIX:       "dctfix",
-	DCTFIXCC:     "dctfix.",
-	DCTFIXQ:      "dctfixq",
-	DCTFIXQCC:    "dctfixq.",
-	DCTQPQ:       "dctqpq",
-	DCTQPQCC:     "dctqpq.",
-	DDEDPD:       "ddedpd",
-	DDEDPDCC:     "ddedpd.",
-	DDEDPDQ:      "ddedpdq",
-	DDEDPDQCC:    "ddedpdq.",
-	DDIV:         "ddiv",
-	DDIVCC:       "ddiv.",
-	DDIVQ:        "ddivq",
-	DDIVQCC:      "ddivq.",
-	DENBCD:       "denbcd",
-	DENBCDCC:     "denbcd.",
-	DENBCDQ:      "denbcdq",
-	DENBCDQCC:    "denbcdq.",
-	DIEX:         "diex",
-	DIEXCC:       "diex.",
-	DIEXQCC:      "diexq.",
-	DIEXQ:        "diexq",
-	DMUL:         "dmul",
-	DMULCC:       "dmul.",
-	DMULQ:        "dmulq",
-	DMULQCC:      "dmulq.",
-	DQUA:         "dqua",
-	DQUACC:       "dqua.",
-	DQUAI:        "dquai",
-	DQUAICC:      "dquai.",
-	DQUAIQ:       "dquaiq",
-	DQUAIQCC:     "dquaiq.",
-	DQUAQ:        "dquaq",
-	DQUAQCC:      "dquaq.",
-	DRDPQ:        "drdpq",
-	DRDPQCC:      "drdpq.",
-	DRINTN:       "drintn",
-	DRINTNCC:     "drintn.",
-	DRINTNQ:      "drintnq",
-	DRINTNQCC:    "drintnq.",
-	DRINTX:       "drintx",
-	DRINTXCC:     "drintx.",
-	DRINTXQ:      "drintxq",
-	DRINTXQCC:    "drintxq.",
-	DRRND:        "drrnd",
-	DRRNDCC:      "drrnd.",
-	DRRNDQ:       "drrndq",
-	DRRNDQCC:     "drrndq.",
-	DRSP:         "drsp",
-	DRSPCC:       "drsp.",
-	DSCLI:        "dscli",
-	DSCLICC:      "dscli.",
-	DSCLIQ:       "dscliq",
-	DSCLIQCC:     "dscliq.",
-	DSCRI:        "dscri",
-	DSCRICC:      "dscri.",
-	DSCRIQ:       "dscriq",
-	DSCRIQCC:     "dscriq.",
-	DSUB:         "dsub",
-	DSUBCC:       "dsub.",
-	DSUBQ:        "dsubq",
-	DSUBQCC:      "dsubq.",
-	DTSTDC:       "dtstdc",
-	DTSTDCQ:      "dtstdcq",
-	DTSTDG:       "dtstdg",
-	DTSTDGQ:      "dtstdgq",
-	DTSTEX:       "dtstex",
-	DTSTEXQ:      "dtstexq",
-	DTSTSF:       "dtstsf",
-	DTSTSFQ:      "dtstsfq",
-	DXEX:         "dxex",
-	DXEXCC:       "dxex.",
-	DXEXQ:        "dxexq",
-	DXEXQCC:      "dxexq.",
-	FCPSGN:       "fcpsgn",
-	FCPSGNCC:     "fcpsgn.",
-	LBZCIX:       "lbzcix",
-	LDCIX:        "ldcix",
-	LFDP:         "lfdp",
-	LFDPX:        "lfdpx",
-	LFIWAX:       "lfiwax",
-	LHZCIX:       "lhzcix",
-	LWZCIX:       "lwzcix",
-	PRTYD:        "prtyd",
-	PRTYW:        "prtyw",
-	SLBFEECC:     "slbfee.",
-	STBCIX:       "stbcix",
-	STDCIX:       "stdcix",
-	STFDP:        "stfdp",
-	STFDPX:       "stfdpx",
-	STHCIX:       "sthcix",
-	STWCIX:       "stwcix",
-	ISEL:         "isel",
-	LVEBX:        "lvebx",
-	LVEHX:        "lvehx",
-	LVEWX:        "lvewx",
-	LVSL:         "lvsl",
-	LVSR:         "lvsr",
-	LVX:          "lvx",
-	LVXL:         "lvxl",
-	MFVSCR:       "mfvscr",
-	MTVSCR:       "mtvscr",
-	STVEBX:       "stvebx",
-	STVEHX:       "stvehx",
-	STVEWX:       "stvewx",
-	STVX:         "stvx",
-	STVXL:        "stvxl",
-	TLBIEL:       "tlbiel",
-	VADDCUW:      "vaddcuw",
-	VADDFP:       "vaddfp",
-	VADDSBS:      "vaddsbs",
-	VADDSHS:      "vaddshs",
-	VADDSWS:      "vaddsws",
-	VADDUBM:      "vaddubm",
-	VADDUBS:      "vaddubs",
-	VADDUHM:      "vadduhm",
-	VADDUHS:      "vadduhs",
-	VADDUWM:      "vadduwm",
-	VADDUWS:      "vadduws",
-	VAND:         "vand",
-	VANDC:        "vandc",
-	VAVGSB:       "vavgsb",
-	VAVGSH:       "vavgsh",
-	VAVGSW:       "vavgsw",
-	VAVGUB:       "vavgub",
-	VAVGUH:       "vavguh",
-	VAVGUW:       "vavguw",
-	VCFSX:        "vcfsx",
-	VCFUX:        "vcfux",
-	VCMPBFP:      "vcmpbfp",
-	VCMPBFPCC:    "vcmpbfp.",
-	VCMPEQFP:     "vcmpeqfp",
-	VCMPEQFPCC:   "vcmpeqfp.",
-	VCMPEQUB:     "vcmpequb",
-	VCMPEQUBCC:   "vcmpequb.",
-	VCMPEQUH:     "vcmpequh",
-	VCMPEQUHCC:   "vcmpequh.",
-	VCMPEQUW:     "vcmpequw",
-	VCMPEQUWCC:   "vcmpequw.",
-	VCMPGEFP:     "vcmpgefp",
-	VCMPGEFPCC:   "vcmpgefp.",
-	VCMPGTFP:     "vcmpgtfp",
-	VCMPGTFPCC:   "vcmpgtfp.",
-	VCMPGTSB:     "vcmpgtsb",
-	VCMPGTSBCC:   "vcmpgtsb.",
-	VCMPGTSH:     "vcmpgtsh",
-	VCMPGTSHCC:   "vcmpgtsh.",
-	VCMPGTSW:     "vcmpgtsw",
-	VCMPGTSWCC:   "vcmpgtsw.",
-	VCMPGTUB:     "vcmpgtub",
-	VCMPGTUBCC:   "vcmpgtub.",
-	VCMPGTUH:     "vcmpgtuh",
-	VCMPGTUHCC:   "vcmpgtuh.",
-	VCMPGTUW:     "vcmpgtuw",
-	VCMPGTUWCC:   "vcmpgtuw.",
-	VCTSXS:       "vctsxs",
-	VCTUXS:       "vctuxs",
-	VEXPTEFP:     "vexptefp",
-	VLOGEFP:      "vlogefp",
-	VMADDFP:      "vmaddfp",
-	VMAXFP:       "vmaxfp",
-	VMAXSB:       "vmaxsb",
-	VMAXSH:       "vmaxsh",
-	VMAXSW:       "vmaxsw",
-	VMAXUB:       "vmaxub",
-	VMAXUH:       "vmaxuh",
-	VMAXUW:       "vmaxuw",
-	VMHADDSHS:    "vmhaddshs",
-	VMHRADDSHS:   "vmhraddshs",
-	VMINFP:       "vminfp",
-	VMINSB:       "vminsb",
-	VMINSH:       "vminsh",
-	VMINSW:       "vminsw",
-	VMINUB:       "vminub",
-	VMINUH:       "vminuh",
-	VMINUW:       "vminuw",
-	VMLADDUHM:    "vmladduhm",
-	VMRGHB:       "vmrghb",
-	VMRGHH:       "vmrghh",
-	VMRGHW:       "vmrghw",
-	VMRGLB:       "vmrglb",
-	VMRGLH:       "vmrglh",
-	VMRGLW:       "vmrglw",
-	VMSUMMBM:     "vmsummbm",
-	VMSUMSHM:     "vmsumshm",
-	VMSUMSHS:     "vmsumshs",
-	VMSUMUBM:     "vmsumubm",
-	VMSUMUHM:     "vmsumuhm",
-	VMSUMUHS:     "vmsumuhs",
-	VMULESB:      "vmulesb",
-	VMULESH:      "vmulesh",
-	VMULEUB:      "vmuleub",
-	VMULEUH:      "vmuleuh",
-	VMULOSB:      "vmulosb",
-	VMULOSH:      "vmulosh",
-	VMULOUB:      "vmuloub",
-	VMULOUH:      "vmulouh",
-	VNMSUBFP:     "vnmsubfp",
-	VNOR:         "vnor",
-	VOR:          "vor",
-	VPERM:        "vperm",
-	VPKPX:        "vpkpx",
-	VPKSHSS:      "vpkshss",
-	VPKSHUS:      "vpkshus",
-	VPKSWSS:      "vpkswss",
-	VPKSWUS:      "vpkswus",
-	VPKUHUM:      "vpkuhum",
-	VPKUHUS:      "vpkuhus",
-	VPKUWUM:      "vpkuwum",
-	VPKUWUS:      "vpkuwus",
-	VREFP:        "vrefp",
-	VRFIM:        "vrfim",
-	VRFIN:        "vrfin",
-	VRFIP:        "vrfip",
-	VRFIZ:        "vrfiz",
-	VRLB:         "vrlb",
-	VRLH:         "vrlh",
-	VRLW:         "vrlw",
-	VRSQRTEFP:    "vrsqrtefp",
-	VSEL:         "vsel",
-	VSL:          "vsl",
-	VSLB:         "vslb",
-	VSLDOI:       "vsldoi",
-	VSLH:         "vslh",
-	VSLO:         "vslo",
-	VSLW:         "vslw",
-	VSPLTB:       "vspltb",
-	VSPLTH:       "vsplth",
-	VSPLTISB:     "vspltisb",
-	VSPLTISH:     "vspltish",
-	VSPLTISW:     "vspltisw",
-	VSPLTW:       "vspltw",
-	VSR:          "vsr",
-	VSRAB:        "vsrab",
-	VSRAH:        "vsrah",
-	VSRAW:        "vsraw",
-	VSRB:         "vsrb",
-	VSRH:         "vsrh",
-	VSRO:         "vsro",
-	VSRW:         "vsrw",
-	VSUBCUW:      "vsubcuw",
-	VSUBFP:       "vsubfp",
-	VSUBSBS:      "vsubsbs",
-	VSUBSHS:      "vsubshs",
-	VSUBSWS:      "vsubsws",
-	VSUBUBM:      "vsububm",
-	VSUBUBS:      "vsububs",
-	VSUBUHM:      "vsubuhm",
-	VSUBUHS:      "vsubuhs",
-	VSUBUWM:      "vsubuwm",
-	VSUBUWS:      "vsubuws",
-	VSUM2SWS:     "vsum2sws",
-	VSUM4SBS:     "vsum4sbs",
-	VSUM4SHS:     "vsum4shs",
-	VSUM4UBS:     "vsum4ubs",
-	VSUMSWS:      "vsumsws",
-	VUPKHPX:      "vupkhpx",
-	VUPKHSB:      "vupkhsb",
-	VUPKHSH:      "vupkhsh",
-	VUPKLPX:      "vupklpx",
-	VUPKLSB:      "vupklsb",
-	VUPKLSH:      "vupklsh",
-	VXOR:         "vxor",
-	FRE:          "fre",
-	FRECC:        "fre.",
-	FRIM:         "frim",
-	FRIMCC:       "frim.",
-	FRIN:         "frin",
-	FRINCC:       "frin.",
-	FRIP:         "frip",
-	FRIPCC:       "frip.",
-	FRIZ:         "friz",
-	FRIZCC:       "friz.",
-	FRSQRTES:     "frsqrtes",
-	FRSQRTESCC:   "frsqrtes.",
-	HRFID:        "hrfid",
-	POPCNTB:      "popcntb",
-	MFOCRF:       "mfocrf",
-	MTOCRF:       "mtocrf",
-	SLBMFEE:      "slbmfee",
-	SLBMFEV:      "slbmfev",
-	SLBMTE:       "slbmte",
-	RFSCV:        "rfscv",
-	SCV:          "scv",
-	LQ:           "lq",
-	STQ:          "stq",
-	CNTLZD:       "cntlzd",
-	CNTLZDCC:     "cntlzd.",
-	DCBF:         "dcbf",
-	DCBST:        "dcbst",
-	DCBT:         "dcbt",
-	DCBTST:       "dcbtst",
-	DIVD:         "divd",
-	DIVDCC:       "divd.",
-	DIVDO:        "divdo",
-	DIVDOCC:      "divdo.",
-	DIVDU:        "divdu",
-	DIVDUCC:      "divdu.",
-	DIVDUO:       "divduo",
-	DIVDUOCC:     "divduo.",
-	DIVW:         "divw",
-	DIVWCC:       "divw.",
-	DIVWO:        "divwo",
-	DIVWOCC:      "divwo.",
-	DIVWU:        "divwu",
-	DIVWUCC:      "divwu.",
-	DIVWUO:       "divwuo",
-	DIVWUOCC:     "divwuo.",
-	EIEIO:        "eieio",
-	EXTSB:        "extsb",
-	EXTSBCC:      "extsb.",
-	EXTSW:        "extsw",
-	EXTSWCC:      "extsw.",
-	FADDS:        "fadds",
-	FADDSCC:      "fadds.",
-	FCFID:        "fcfid",
-	FCFIDCC:      "fcfid.",
-	FCTID:        "fctid",
-	FCTIDCC:      "fctid.",
-	FCTIDZ:       "fctidz",
-	FCTIDZCC:     "fctidz.",
-	FDIVS:        "fdivs",
-	FDIVSCC:      "fdivs.",
-	FMADDS:       "fmadds",
-	FMADDSCC:     "fmadds.",
-	FMSUBS:       "fmsubs",
-	FMSUBSCC:     "fmsubs.",
-	FMULS:        "fmuls",
-	FMULSCC:      "fmuls.",
-	FNMADDS:      "fnmadds",
-	FNMADDSCC:    "fnmadds.",
-	FNMSUBS:      "fnmsubs",
-	FNMSUBSCC:    "fnmsubs.",
-	FRES:         "fres",
-	FRESCC:       "fres.",
-	FRSQRTE:      "frsqrte",
-	FRSQRTECC:    "frsqrte.",
-	FSEL:         "fsel",
-	FSELCC:       "fsel.",
-	FSQRTS:       "fsqrts",
-	FSQRTSCC:     "fsqrts.",
-	FSUBS:        "fsubs",
-	FSUBSCC:      "fsubs.",
-	ICBI:         "icbi",
-	LD:           "ld",
-	LDARX:        "ldarx",
-	LDU:          "ldu",
-	LDUX:         "ldux",
-	LDX:          "ldx",
-	LWA:          "lwa",
-	LWARX:        "lwarx",
-	LWAUX:        "lwaux",
-	LWAX:         "lwax",
-	MFTB:         "mftb",
-	MTMSRD:       "mtmsrd",
-	MULHD:        "mulhd",
-	MULHDCC:      "mulhd.",
-	MULHDU:       "mulhdu",
-	MULHDUCC:     "mulhdu.",
-	MULHW:        "mulhw",
-	MULHWCC:      "mulhw.",
-	MULHWU:       "mulhwu",
-	MULHWUCC:     "mulhwu.",
-	MULLD:        "mulld",
-	MULLDCC:      "mulld.",
-	MULLDO:       "mulldo",
-	MULLDOCC:     "mulldo.",
-	RFID:         "rfid",
-	RLDCL:        "rldcl",
-	RLDCLCC:      "rldcl.",
-	RLDCR:        "rldcr",
-	RLDCRCC:      "rldcr.",
-	RLDIC:        "rldic",
-	RLDICCC:      "rldic.",
-	RLDICL:       "rldicl",
-	RLDICLCC:     "rldicl.",
-	RLDICR:       "rldicr",
-	RLDICRCC:     "rldicr.",
-	RLDIMI:       "rldimi",
-	RLDIMICC:     "rldimi.",
-	SC:           "sc",
-	SLBIA:        "slbia",
-	SLBIE:        "slbie",
-	SLD:          "sld",
-	SLDCC:        "sld.",
-	SRAD:         "srad",
-	SRADCC:       "srad.",
-	SRADI:        "sradi",
-	SRADICC:      "sradi.",
-	SRD:          "srd",
-	SRDCC:        "srd.",
-	STD:          "std",
-	STDCXCC:      "stdcx.",
-	STDU:         "stdu",
-	STDUX:        "stdux",
-	STDX:         "stdx",
-	STFIWX:       "stfiwx",
-	STWCXCC:      "stwcx.",
-	SUBF:         "subf",
-	SUBFCC:       "subf.",
-	SUBFO:        "subfo",
-	SUBFOCC:      "subfo.",
-	TD:           "td",
-	TDI:          "tdi",
-	TLBSYNC:      "tlbsync",
-	FCTIW:        "fctiw",
-	FCTIWCC:      "fctiw.",
-	FCTIWZ:       "fctiwz",
-	FCTIWZCC:     "fctiwz.",
-	FSQRT:        "fsqrt",
-	FSQRTCC:      "fsqrt.",
-	ADD:          "add",
-	ADDCC:        "add.",
-	ADDO:         "addo",
-	ADDOCC:       "addo.",
-	ADDC:         "addc",
-	ADDCCC:       "addc.",
-	ADDCO:        "addco",
-	ADDCOCC:      "addco.",
-	ADDE:         "adde",
-	ADDECC:       "adde.",
-	ADDEO:        "addeo",
-	ADDEOCC:      "addeo.",
-	LI:           "li",
-	ADDI:         "addi",
-	ADDIC:        "addic",
-	ADDICCC:      "addic.",
-	LIS:          "lis",
-	ADDIS:        "addis",
-	ADDME:        "addme",
-	ADDMECC:      "addme.",
-	ADDMEO:       "addmeo",
-	ADDMEOCC:     "addmeo.",
-	ADDZE:        "addze",
-	ADDZECC:      "addze.",
-	ADDZEO:       "addzeo",
-	ADDZEOCC:     "addzeo.",
-	AND:          "and",
-	ANDCC:        "and.",
-	ANDC:         "andc",
-	ANDCCC:       "andc.",
-	ANDICC:       "andi.",
-	ANDISCC:      "andis.",
-	B:            "b",
-	BA:           "ba",
-	BL:           "bl",
-	BLA:          "bla",
-	BC:           "bc",
-	BCA:          "bca",
-	BCL:          "bcl",
-	BCLA:         "bcla",
-	BCCTR:        "bcctr",
-	BCCTRL:       "bcctrl",
-	BCLR:         "bclr",
-	BCLRL:        "bclrl",
-	CMPW:         "cmpw",
-	CMPD:         "cmpd",
-	CMP:          "cmp",
-	CMPWI:        "cmpwi",
-	CMPDI:        "cmpdi",
-	CMPI:         "cmpi",
-	CMPLW:        "cmplw",
-	CMPLD:        "cmpld",
-	CMPL:         "cmpl",
-	CMPLWI:       "cmplwi",
-	CMPLDI:       "cmpldi",
-	CMPLI:        "cmpli",
-	CNTLZW:       "cntlzw",
-	CNTLZWCC:     "cntlzw.",
-	CRAND:        "crand",
-	CRANDC:       "crandc",
-	CREQV:        "creqv",
-	CRNAND:       "crnand",
-	CRNOR:        "crnor",
-	CROR:         "cror",
-	CRORC:        "crorc",
-	CRXOR:        "crxor",
-	DCBZ:         "dcbz",
-	EQV:          "eqv",
-	EQVCC:        "eqv.",
-	EXTSH:        "extsh",
-	EXTSHCC:      "extsh.",
-	FABS:         "fabs",
-	FABSCC:       "fabs.",
-	FADD:         "fadd",
-	FADDCC:       "fadd.",
-	FCMPO:        "fcmpo",
-	FCMPU:        "fcmpu",
-	FDIV:         "fdiv",
-	FDIVCC:       "fdiv.",
-	FMADD:        "fmadd",
-	FMADDCC:      "fmadd.",
-	FMR:          "fmr",
-	FMRCC:        "fmr.",
-	FMSUB:        "fmsub",
-	FMSUBCC:      "fmsub.",
-	FMUL:         "fmul",
-	FMULCC:       "fmul.",
-	FNABS:        "fnabs",
-	FNABSCC:      "fnabs.",
-	FNEG:         "fneg",
-	FNEGCC:       "fneg.",
-	FNMADD:       "fnmadd",
-	FNMADDCC:     "fnmadd.",
-	FNMSUB:       "fnmsub",
-	FNMSUBCC:     "fnmsub.",
-	FRSP:         "frsp",
-	FRSPCC:       "frsp.",
-	FSUB:         "fsub",
-	FSUBCC:       "fsub.",
-	ISYNC:        "isync",
-	LBZ:          "lbz",
-	LBZU:         "lbzu",
-	LBZUX:        "lbzux",
-	LBZX:         "lbzx",
-	LFD:          "lfd",
-	LFDU:         "lfdu",
-	LFDUX:        "lfdux",
-	LFDX:         "lfdx",
-	LFS:          "lfs",
-	LFSU:         "lfsu",
-	LFSUX:        "lfsux",
-	LFSX:         "lfsx",
-	LHA:          "lha",
-	LHAU:         "lhau",
-	LHAUX:        "lhaux",
-	LHAX:         "lhax",
-	LHBRX:        "lhbrx",
-	LHZ:          "lhz",
-	LHZU:         "lhzu",
-	LHZUX:        "lhzux",
-	LHZX:         "lhzx",
-	LMW:          "lmw",
-	LSWI:         "lswi",
-	LSWX:         "lswx",
-	LWBRX:        "lwbrx",
-	LWZ:          "lwz",
-	LWZU:         "lwzu",
-	LWZUX:        "lwzux",
-	LWZX:         "lwzx",
-	MCRF:         "mcrf",
-	MCRFS:        "mcrfs",
-	MFCR:         "mfcr",
-	MFFS:         "mffs",
-	MFFSCC:       "mffs.",
-	MFMSR:        "mfmsr",
-	MFSPR:        "mfspr",
-	MTCRF:        "mtcrf",
-	MTFSB0:       "mtfsb0",
-	MTFSB0CC:     "mtfsb0.",
-	MTFSB1:       "mtfsb1",
-	MTFSB1CC:     "mtfsb1.",
-	MTFSF:        "mtfsf",
-	MTFSFCC:      "mtfsf.",
-	MTFSFI:       "mtfsfi",
-	MTFSFICC:     "mtfsfi.",
-	MTMSR:        "mtmsr",
-	MTSPR:        "mtspr",
-	MULLI:        "mulli",
-	MULLW:        "mullw",
-	MULLWCC:      "mullw.",
-	MULLWO:       "mullwo",
-	MULLWOCC:     "mullwo.",
-	NAND:         "nand",
-	NANDCC:       "nand.",
-	NEG:          "neg",
-	NEGCC:        "neg.",
-	NEGO:         "nego",
-	NEGOCC:       "nego.",
-	NOR:          "nor",
-	NORCC:        "nor.",
-	OR:           "or",
-	ORCC:         "or.",
-	ORC:          "orc",
-	ORCCC:        "orc.",
-	ORI:          "ori",
-	ORIS:         "oris",
-	RLWIMI:       "rlwimi",
-	RLWIMICC:     "rlwimi.",
-	RLWINM:       "rlwinm",
-	RLWINMCC:     "rlwinm.",
-	RLWNM:        "rlwnm",
-	RLWNMCC:      "rlwnm.",
-	SLW:          "slw",
-	SLWCC:        "slw.",
-	SRAW:         "sraw",
-	SRAWCC:       "sraw.",
-	SRAWI:        "srawi",
-	SRAWICC:      "srawi.",
-	SRW:          "srw",
-	SRWCC:        "srw.",
-	STB:          "stb",
-	STBU:         "stbu",
-	STBUX:        "stbux",
-	STBX:         "stbx",
-	STFD:         "stfd",
-	STFDU:        "stfdu",
-	STFDUX:       "stfdux",
-	STFDX:        "stfdx",
-	STFS:         "stfs",
-	STFSU:        "stfsu",
-	STFSUX:       "stfsux",
-	STFSX:        "stfsx",
-	STH:          "sth",
-	STHBRX:       "sthbrx",
-	STHU:         "sthu",
-	STHUX:        "sthux",
-	STHX:         "sthx",
-	STMW:         "stmw",
-	STSWI:        "stswi",
-	STSWX:        "stswx",
-	STW:          "stw",
-	STWBRX:       "stwbrx",
-	STWU:         "stwu",
-	STWUX:        "stwux",
-	STWX:         "stwx",
-	SUBFC:        "subfc",
-	SUBFCCC:      "subfc.",
-	SUBFCO:       "subfco",
-	SUBFCOCC:     "subfco.",
-	SUBFE:        "subfe",
-	SUBFECC:      "subfe.",
-	SUBFEO:       "subfeo",
-	SUBFEOCC:     "subfeo.",
-	SUBFIC:       "subfic",
-	SUBFME:       "subfme",
-	SUBFMECC:     "subfme.",
-	SUBFMEO:      "subfmeo",
-	SUBFMEOCC:    "subfmeo.",
-	SUBFZE:       "subfze",
-	SUBFZECC:     "subfze.",
-	SUBFZEO:      "subfzeo",
-	SUBFZEOCC:    "subfzeo.",
-	SYNC:         "sync",
-	TLBIE:        "tlbie",
-	TW:           "tw",
-	TWI:          "twi",
-	XOR:          "xor",
-	XORCC:        "xor.",
-	XORI:         "xori",
-	XORIS:        "xoris",
+	BRD:            "brd",
+	BRH:            "brh",
+	BRW:            "brw",
+	CFUGED:         "cfuged",
+	CNTLZDM:        "cntlzdm",
+	CNTTZDM:        "cnttzdm",
+	DCFFIXQQ:       "dcffixqq",
+	DCTFIXQQ:       "dctfixqq",
+	LXVKQ:          "lxvkq",
+	LXVP:           "lxvp",
+	LXVPX:          "lxvpx",
+	LXVRBX:         "lxvrbx",
+	LXVRDX:         "lxvrdx",
+	LXVRHX:         "lxvrhx",
+	LXVRWX:         "lxvrwx",
+	MTVSRBM:        "mtvsrbm",
+	MTVSRBMI:       "mtvsrbmi",
+	MTVSRDM:        "mtvsrdm",
+	MTVSRHM:        "mtvsrhm",
+	MTVSRQM:        "mtvsrqm",
+	MTVSRWM:        "mtvsrwm",
+	PADDI:          "paddi",
+	PDEPD:          "pdepd",
+	PEXTD:          "pextd",
+	PLBZ:           "plbz",
+	PLD:            "pld",
+	PLFD:           "plfd",
+	PLFS:           "plfs",
+	PLHA:           "plha",
+	PLHZ:           "plhz",
+	PLQ:            "plq",
+	PLWA:           "plwa",
+	PLWZ:           "plwz",
+	PLXSD:          "plxsd",
+	PLXSSP:         "plxssp",
+	PLXV:           "plxv",
+	PLXVP:          "plxvp",
+	PMXVBF16GER2:   "pmxvbf16ger2",
+	PMXVBF16GER2NN: "pmxvbf16ger2nn",
+	PMXVBF16GER2NP: "pmxvbf16ger2np",
+	PMXVBF16GER2PN: "pmxvbf16ger2pn",
+	PMXVBF16GER2PP: "pmxvbf16ger2pp",
+	PMXVF16GER2:    "pmxvf16ger2",
+	PMXVF16GER2NN:  "pmxvf16ger2nn",
+	PMXVF16GER2NP:  "pmxvf16ger2np",
+	PMXVF16GER2PN:  "pmxvf16ger2pn",
+	PMXVF16GER2PP:  "pmxvf16ger2pp",
+	PMXVF32GER:     "pmxvf32ger",
+	PMXVF32GERNN:   "pmxvf32gernn",
+	PMXVF32GERNP:   "pmxvf32gernp",
+	PMXVF32GERPN:   "pmxvf32gerpn",
+	PMXVF32GERPP:   "pmxvf32gerpp",
+	PMXVF64GER:     "pmxvf64ger",
+	PMXVF64GERNN:   "pmxvf64gernn",
+	PMXVF64GERNP:   "pmxvf64gernp",
+	PMXVF64GERPN:   "pmxvf64gerpn",
+	PMXVF64GERPP:   "pmxvf64gerpp",
+	PMXVI16GER2:    "pmxvi16ger2",
+	PMXVI16GER2PP:  "pmxvi16ger2pp",
+	PMXVI16GER2S:   "pmxvi16ger2s",
+	PMXVI16GER2SPP: "pmxvi16ger2spp",
+	PMXVI4GER8:     "pmxvi4ger8",
+	PMXVI4GER8PP:   "pmxvi4ger8pp",
+	PMXVI8GER4:     "pmxvi8ger4",
+	PMXVI8GER4PP:   "pmxvi8ger4pp",
+	PMXVI8GER4SPP:  "pmxvi8ger4spp",
+	PNOP:           "pnop",
+	PSTB:           "pstb",
+	PSTD:           "pstd",
+	PSTFD:          "pstfd",
+	PSTFS:          "pstfs",
+	PSTH:           "psth",
+	PSTQ:           "pstq",
+	PSTW:           "pstw",
+	PSTXSD:         "pstxsd",
+	PSTXSSP:        "pstxssp",
+	PSTXV:          "pstxv",
+	PSTXVP:         "pstxvp",
+	SETBC:          "setbc",
+	SETBCR:         "setbcr",
+	SETNBC:         "setnbc",
+	SETNBCR:        "setnbcr",
+	STXVP:          "stxvp",
+	STXVPX:         "stxvpx",
+	STXVRBX:        "stxvrbx",
+	STXVRDX:        "stxvrdx",
+	STXVRHX:        "stxvrhx",
+	STXVRWX:        "stxvrwx",
+	VCFUGED:        "vcfuged",
+	VCLRLB:         "vclrlb",
+	VCLRRB:         "vclrrb",
+	VCLZDM:         "vclzdm",
+	VCMPEQUQ:       "vcmpequq",
+	VCMPEQUQCC:     "vcmpequq.",
+	VCMPGTSQ:       "vcmpgtsq",
+	VCMPGTSQCC:     "vcmpgtsq.",
+	VCMPGTUQ:       "vcmpgtuq",
+	VCMPGTUQCC:     "vcmpgtuq.",
+	VCMPSQ:         "vcmpsq",
+	VCMPUQ:         "vcmpuq",
+	VCNTMBB:        "vcntmbb",
+	VCNTMBD:        "vcntmbd",
+	VCNTMBH:        "vcntmbh",
+	VCNTMBW:        "vcntmbw",
+	VCTZDM:         "vctzdm",
+	VDIVESD:        "vdivesd",
+	VDIVESQ:        "vdivesq",
+	VDIVESW:        "vdivesw",
+	VDIVEUD:        "vdiveud",
+	VDIVEUQ:        "vdiveuq",
+	VDIVEUW:        "vdiveuw",
+	VDIVSD:         "vdivsd",
+	VDIVSQ:         "vdivsq",
+	VDIVSW:         "vdivsw",
+	VDIVUD:         "vdivud",
+	VDIVUQ:         "vdivuq",
+	VDIVUW:         "vdivuw",
+	VEXPANDBM:      "vexpandbm",
+	VEXPANDDM:      "vexpanddm",
+	VEXPANDHM:      "vexpandhm",
+	VEXPANDQM:      "vexpandqm",
+	VEXPANDWM:      "vexpandwm",
+	VEXTDDVLX:      "vextddvlx",
+	VEXTDDVRX:      "vextddvrx",
+	VEXTDUBVLX:     "vextdubvlx",
+	VEXTDUBVRX:     "vextdubvrx",
+	VEXTDUHVLX:     "vextduhvlx",
+	VEXTDUHVRX:     "vextduhvrx",
+	VEXTDUWVLX:     "vextduwvlx",
+	VEXTDUWVRX:     "vextduwvrx",
+	VEXTRACTBM:     "vextractbm",
+	VEXTRACTDM:     "vextractdm",
+	VEXTRACTHM:     "vextracthm",
+	VEXTRACTQM:     "vextractqm",
+	VEXTRACTWM:     "vextractwm",
+	VEXTSD2Q:       "vextsd2q",
+	VGNB:           "vgnb",
+	VINSBLX:        "vinsblx",
+	VINSBRX:        "vinsbrx",
+	VINSBVLX:       "vinsbvlx",
+	VINSBVRX:       "vinsbvrx",
+	VINSD:          "vinsd",
+	VINSDLX:        "vinsdlx",
+	VINSDRX:        "vinsdrx",
+	VINSHLX:        "vinshlx",
+	VINSHRX:        "vinshrx",
+	VINSHVLX:       "vinshvlx",
+	VINSHVRX:       "vinshvrx",
+	VINSW:          "vinsw",
+	VINSWLX:        "vinswlx",
+	VINSWRX:        "vinswrx",
+	VINSWVLX:       "vinswvlx",
+	VINSWVRX:       "vinswvrx",
+	VMODSD:         "vmodsd",
+	VMODSQ:         "vmodsq",
+	VMODSW:         "vmodsw",
+	VMODUD:         "vmodud",
+	VMODUQ:         "vmoduq",
+	VMODUW:         "vmoduw",
+	VMSUMCUD:       "vmsumcud",
+	VMULESD:        "vmulesd",
+	VMULEUD:        "vmuleud",
+	VMULHSD:        "vmulhsd",
+	VMULHSW:        "vmulhsw",
+	VMULHUD:        "vmulhud",
+	VMULHUW:        "vmulhuw",
+	VMULLD:         "vmulld",
+	VMULOSD:        "vmulosd",
+	VMULOUD:        "vmuloud",
+	VPDEPD:         "vpdepd",
+	VPEXTD:         "vpextd",
+	VRLQ:           "vrlq",
+	VRLQMI:         "vrlqmi",
+	VRLQNM:         "vrlqnm",
+	VSLDBI:         "vsldbi",
+	VSLQ:           "vslq",
+	VSRAQ:          "vsraq",
+	VSRDBI:         "vsrdbi",
+	VSRQ:           "vsrq",
+	VSTRIBL:        "vstribl",
+	VSTRIBLCC:      "vstribl.",
+	VSTRIBR:        "vstribr",
+	VSTRIBRCC:      "vstribr.",
+	VSTRIHL:        "vstrihl",
+	VSTRIHLCC:      "vstrihl.",
+	VSTRIHR:        "vstrihr",
+	VSTRIHRCC:      "vstrihr.",
+	XSCMPEQQP:      "xscmpeqqp",
+	XSCMPGEQP:      "xscmpgeqp",
+	XSCMPGTQP:      "xscmpgtqp",
+	XSCVQPSQZ:      "xscvqpsqz",
+	XSCVQPUQZ:      "xscvqpuqz",
+	XSCVSQQP:       "xscvsqqp",
+	XSCVUQQP:       "xscvuqqp",
+	XSMAXCQP:       "xsmaxcqp",
+	XSMINCQP:       "xsmincqp",
+	XVBF16GER2:     "xvbf16ger2",
+	XVBF16GER2NN:   "xvbf16ger2nn",
+	XVBF16GER2NP:   "xvbf16ger2np",
+	XVBF16GER2PN:   "xvbf16ger2pn",
+	XVBF16GER2PP:   "xvbf16ger2pp",
+	XVCVBF16SPN:    "xvcvbf16spn",
+	XVCVSPBF16:     "xvcvspbf16",
+	XVF16GER2:      "xvf16ger2",
+	XVF16GER2NN:    "xvf16ger2nn",
+	XVF16GER2NP:    "xvf16ger2np",
+	XVF16GER2PN:    "xvf16ger2pn",
+	XVF16GER2PP:    "xvf16ger2pp",
+	XVF32GER:       "xvf32ger",
+	XVF32GERNN:     "xvf32gernn",
+	XVF32GERNP:     "xvf32gernp",
+	XVF32GERPN:     "xvf32gerpn",
+	XVF32GERPP:     "xvf32gerpp",
+	XVF64GER:       "xvf64ger",
+	XVF64GERNN:     "xvf64gernn",
+	XVF64GERNP:     "xvf64gernp",
+	XVF64GERPN:     "xvf64gerpn",
+	XVF64GERPP:     "xvf64gerpp",
+	XVI16GER2:      "xvi16ger2",
+	XVI16GER2PP:    "xvi16ger2pp",
+	XVI16GER2S:     "xvi16ger2s",
+	XVI16GER2SPP:   "xvi16ger2spp",
+	XVI4GER8:       "xvi4ger8",
+	XVI4GER8PP:     "xvi4ger8pp",
+	XVI8GER4:       "xvi8ger4",
+	XVI8GER4PP:     "xvi8ger4pp",
+	XVI8GER4SPP:    "xvi8ger4spp",
+	XVTLSBB:        "xvtlsbb",
+	XXBLENDVB:      "xxblendvb",
+	XXBLENDVD:      "xxblendvd",
+	XXBLENDVH:      "xxblendvh",
+	XXBLENDVW:      "xxblendvw",
+	XXEVAL:         "xxeval",
+	XXGENPCVBM:     "xxgenpcvbm",
+	XXGENPCVDM:     "xxgenpcvdm",
+	XXGENPCVHM:     "xxgenpcvhm",
+	XXGENPCVWM:     "xxgenpcvwm",
+	XXMFACC:        "xxmfacc",
+	XXMTACC:        "xxmtacc",
+	XXPERMX:        "xxpermx",
+	XXSETACCZ:      "xxsetaccz",
+	XXSPLTI32DX:    "xxsplti32dx",
+	XXSPLTIDP:      "xxspltidp",
+	XXSPLTIW:       "xxspltiw",
+	MSGCLRU:        "msgclru",
+	MSGSNDU:        "msgsndu",
+	URFID:          "urfid",
+	ADDEX:          "addex",
+	MFFSCDRN:       "mffscdrn",
+	MFFSCDRNI:      "mffscdrni",
+	MFFSCE:         "mffsce",
+	MFFSCRN:        "mffscrn",
+	MFFSCRNI:       "mffscrni",
+	MFFSL:          "mffsl",
+	SLBIAG:         "slbiag",
+	VMSUMUDM:       "vmsumudm",
+	ADDPCIS:        "addpcis",
+	BCDCFNCC:       "bcdcfn.",
+	BCDCFSQCC:      "bcdcfsq.",
+	BCDCFZCC:       "bcdcfz.",
+	BCDCPSGNCC:     "bcdcpsgn.",
+	BCDCTNCC:       "bcdctn.",
+	BCDCTSQCC:      "bcdctsq.",
+	BCDCTZCC:       "bcdctz.",
+	BCDSCC:         "bcds.",
+	BCDSETSGNCC:    "bcdsetsgn.",
+	BCDSRCC:        "bcdsr.",
+	BCDTRUNCCC:     "bcdtrunc.",
+	BCDUSCC:        "bcdus.",
+	BCDUTRUNCCC:    "bcdutrunc.",
+	CMPEQB:         "cmpeqb",
+	CMPRB:          "cmprb",
+	CNTTZD:         "cnttzd",
+	CNTTZDCC:       "cnttzd.",
+	CNTTZW:         "cnttzw",
+	CNTTZWCC:       "cnttzw.",
+	COPY:           "copy",
+	CPABORT:        "cpabort",
+	DARN:           "darn",
+	DTSTSFI:        "dtstsfi",
+	DTSTSFIQ:       "dtstsfiq",
+	EXTSWSLI:       "extswsli",
+	EXTSWSLICC:     "extswsli.",
+	LDAT:           "ldat",
+	LWAT:           "lwat",
+	LXSD:           "lxsd",
+	LXSIBZX:        "lxsibzx",
+	LXSIHZX:        "lxsihzx",
+	LXSSP:          "lxssp",
+	LXV:            "lxv",
+	LXVB16X:        "lxvb16x",
+	LXVH8X:         "lxvh8x",
+	LXVL:           "lxvl",
+	LXVLL:          "lxvll",
+	LXVWSX:         "lxvwsx",
+	LXVX:           "lxvx",
+	MADDHD:         "maddhd",
+	MADDHDU:        "maddhdu",
+	MADDLD:         "maddld",
+	MCRXRX:         "mcrxrx",
+	MFVSRLD:        "mfvsrld",
+	MODSD:          "modsd",
+	MODSW:          "modsw",
+	MODUD:          "modud",
+	MODUW:          "moduw",
+	MSGSYNC:        "msgsync",
+	MTVSRDD:        "mtvsrdd",
+	MTVSRWS:        "mtvsrws",
+	PASTECC:        "paste.",
+	SETB:           "setb",
+	SLBIEG:         "slbieg",
+	SLBSYNC:        "slbsync",
+	STDAT:          "stdat",
+	STOP:           "stop",
+	STWAT:          "stwat",
+	STXSD:          "stxsd",
+	STXSIBX:        "stxsibx",
+	STXSIHX:        "stxsihx",
+	STXSSP:         "stxssp",
+	STXV:           "stxv",
+	STXVB16X:       "stxvb16x",
+	STXVH8X:        "stxvh8x",
+	STXVL:          "stxvl",
+	STXVLL:         "stxvll",
+	STXVX:          "stxvx",
+	VABSDUB:        "vabsdub",
+	VABSDUH:        "vabsduh",
+	VABSDUW:        "vabsduw",
+	VBPERMD:        "vbpermd",
+	VCLZLSBB:       "vclzlsbb",
+	VCMPNEB:        "vcmpneb",
+	VCMPNEBCC:      "vcmpneb.",
+	VCMPNEH:        "vcmpneh",
+	VCMPNEHCC:      "vcmpneh.",
+	VCMPNEW:        "vcmpnew",
+	VCMPNEWCC:      "vcmpnew.",
+	VCMPNEZB:       "vcmpnezb",
+	VCMPNEZBCC:     "vcmpnezb.",
+	VCMPNEZH:       "vcmpnezh",
+	VCMPNEZHCC:     "vcmpnezh.",
+	VCMPNEZW:       "vcmpnezw",
+	VCMPNEZWCC:     "vcmpnezw.",
+	VCTZB:          "vctzb",
+	VCTZD:          "vctzd",
+	VCTZH:          "vctzh",
+	VCTZLSBB:       "vctzlsbb",
+	VCTZW:          "vctzw",
+	VEXTRACTD:      "vextractd",
+	VEXTRACTUB:     "vextractub",
+	VEXTRACTUH:     "vextractuh",
+	VEXTRACTUW:     "vextractuw",
+	VEXTSB2D:       "vextsb2d",
+	VEXTSB2W:       "vextsb2w",
+	VEXTSH2D:       "vextsh2d",
+	VEXTSH2W:       "vextsh2w",
+	VEXTSW2D:       "vextsw2d",
+	VEXTUBLX:       "vextublx",
+	VEXTUBRX:       "vextubrx",
+	VEXTUHLX:       "vextuhlx",
+	VEXTUHRX:       "vextuhrx",
+	VEXTUWLX:       "vextuwlx",
+	VEXTUWRX:       "vextuwrx",
+	VINSERTB:       "vinsertb",
+	VINSERTD:       "vinsertd",
+	VINSERTH:       "vinserth",
+	VINSERTW:       "vinsertw",
+	VMUL10CUQ:      "vmul10cuq",
+	VMUL10ECUQ:     "vmul10ecuq",
+	VMUL10EUQ:      "vmul10euq",
+	VMUL10UQ:       "vmul10uq",
+	VNEGD:          "vnegd",
+	VNEGW:          "vnegw",
+	VPERMR:         "vpermr",
+	VPRTYBD:        "vprtybd",
+	VPRTYBQ:        "vprtybq",
+	VPRTYBW:        "vprtybw",
+	VRLDMI:         "vrldmi",
+	VRLDNM:         "vrldnm",
+	VRLWMI:         "vrlwmi",
+	VRLWNM:         "vrlwnm",
+	VSLV:           "vslv",
+	VSRV:           "vsrv",
+	WAIT:           "wait",
+	XSABSQP:        "xsabsqp",
+	XSADDQP:        "xsaddqp",
+	XSADDQPO:       "xsaddqpo",
+	XSCMPEQDP:      "xscmpeqdp",
+	XSCMPEXPDP:     "xscmpexpdp",
+	XSCMPEXPQP:     "xscmpexpqp",
+	XSCMPGEDP:      "xscmpgedp",
+	XSCMPGTDP:      "xscmpgtdp",
+	XSCMPOQP:       "xscmpoqp",
+	XSCMPUQP:       "xscmpuqp",
+	XSCPSGNQP:      "xscpsgnqp",
+	XSCVDPHP:       "xscvdphp",
+	XSCVDPQP:       "xscvdpqp",
+	XSCVHPDP:       "xscvhpdp",
+	XSCVQPDP:       "xscvqpdp",
+	XSCVQPDPO:      "xscvqpdpo",
+	XSCVQPSDZ:      "xscvqpsdz",
+	XSCVQPSWZ:      "xscvqpswz",
+	XSCVQPUDZ:      "xscvqpudz",
+	XSCVQPUWZ:      "xscvqpuwz",
+	XSCVSDQP:       "xscvsdqp",
+	XSCVUDQP:       "xscvudqp",
+	XSDIVQP:        "xsdivqp",
+	XSDIVQPO:       "xsdivqpo",
+	XSIEXPDP:       "xsiexpdp",
+	XSIEXPQP:       "xsiexpqp",
+	XSMADDQP:       "xsmaddqp",
+	XSMADDQPO:      "xsmaddqpo",
+	XSMAXCDP:       "xsmaxcdp",
+	XSMAXJDP:       "xsmaxjdp",
+	XSMINCDP:       "xsmincdp",
+	XSMINJDP:       "xsminjdp",
+	XSMSUBQP:       "xsmsubqp",
+	XSMSUBQPO:      "xsmsubqpo",
+	XSMULQP:        "xsmulqp",
+	XSMULQPO:       "xsmulqpo",
+	XSNABSQP:       "xsnabsqp",
+	XSNEGQP:        "xsnegqp",
+	XSNMADDQP:      "xsnmaddqp",
+	XSNMADDQPO:     "xsnmaddqpo",
+	XSNMSUBQP:      "xsnmsubqp",
+	XSNMSUBQPO:     "xsnmsubqpo",
+	XSRQPI:         "xsrqpi",
+	XSRQPIX:        "xsrqpix",
+	XSRQPXP:        "xsrqpxp",
+	XSSQRTQP:       "xssqrtqp",
+	XSSQRTQPO:      "xssqrtqpo",
+	XSSUBQP:        "xssubqp",
+	XSSUBQPO:       "xssubqpo",
+	XSTSTDCDP:      "xststdcdp",
+	XSTSTDCQP:      "xststdcqp",
+	XSTSTDCSP:      "xststdcsp",
+	XSXEXPDP:       "xsxexpdp",
+	XSXEXPQP:       "xsxexpqp",
+	XSXSIGDP:       "xsxsigdp",
+	XSXSIGQP:       "xsxsigqp",
+	XVCVHPSP:       "xvcvhpsp",
+	XVCVSPHP:       "xvcvsphp",
+	XVIEXPDP:       "xviexpdp",
+	XVIEXPSP:       "xviexpsp",
+	XVTSTDCDP:      "xvtstdcdp",
+	XVTSTDCSP:      "xvtstdcsp",
+	XVXEXPDP:       "xvxexpdp",
+	XVXEXPSP:       "xvxexpsp",
+	XVXSIGDP:       "xvxsigdp",
+	XVXSIGSP:       "xvxsigsp",
+	XXBRD:          "xxbrd",
+	XXBRH:          "xxbrh",
+	XXBRQ:          "xxbrq",
+	XXBRW:          "xxbrw",
+	XXEXTRACTUW:    "xxextractuw",
+	XXINSERTW:      "xxinsertw",
+	XXPERM:         "xxperm",
+	XXPERMR:        "xxpermr",
+	XXSPLTIB:       "xxspltib",
+	BCDADDCC:       "bcdadd.",
+	BCDSUBCC:       "bcdsub.",
+	BCTAR:          "bctar",
+	BCTARL:         "bctarl",
+	CLRBHRB:        "clrbhrb",
+	FMRGEW:         "fmrgew",
+	FMRGOW:         "fmrgow",
+	ICBT:           "icbt",
+	LQARX:          "lqarx",
+	LXSIWAX:        "lxsiwax",
+	LXSIWZX:        "lxsiwzx",
+	LXSSPX:         "lxsspx",
+	MFBHRBE:        "mfbhrbe",
+	MFVSRD:         "mfvsrd",
+	MFVSRWZ:        "mfvsrwz",
+	MSGCLR:         "msgclr",
+	MSGCLRP:        "msgclrp",
+	MSGSND:         "msgsnd",
+	MSGSNDP:        "msgsndp",
+	MTVSRD:         "mtvsrd",
+	MTVSRWA:        "mtvsrwa",
+	MTVSRWZ:        "mtvsrwz",
+	RFEBB:          "rfebb",
+	STQCXCC:        "stqcx.",
+	STXSIWX:        "stxsiwx",
+	STXSSPX:        "stxsspx",
+	VADDCUQ:        "vaddcuq",
+	VADDECUQ:       "vaddecuq",
+	VADDEUQM:       "vaddeuqm",
+	VADDUDM:        "vaddudm",
+	VADDUQM:        "vadduqm",
+	VBPERMQ:        "vbpermq",
+	VCIPHER:        "vcipher",
+	VCIPHERLAST:    "vcipherlast",
+	VCLZB:          "vclzb",
+	VCLZD:          "vclzd",
+	VCLZH:          "vclzh",
+	VCLZW:          "vclzw",
+	VCMPEQUD:       "vcmpequd",
+	VCMPEQUDCC:     "vcmpequd.",
+	VCMPGTSD:       "vcmpgtsd",
+	VCMPGTSDCC:     "vcmpgtsd.",
+	VCMPGTUD:       "vcmpgtud",
+	VCMPGTUDCC:     "vcmpgtud.",
+	VEQV:           "veqv",
+	VGBBD:          "vgbbd",
+	VMAXSD:         "vmaxsd",
+	VMAXUD:         "vmaxud",
+	VMINSD:         "vminsd",
+	VMINUD:         "vminud",
+	VMRGEW:         "vmrgew",
+	VMRGOW:         "vmrgow",
+	VMULESW:        "vmulesw",
+	VMULEUW:        "vmuleuw",
+	VMULOSW:        "vmulosw",
+	VMULOUW:        "vmulouw",
+	VMULUWM:        "vmuluwm",
+	VNAND:          "vnand",
+	VNCIPHER:       "vncipher",
+	VNCIPHERLAST:   "vncipherlast",
+	VORC:           "vorc",
+	VPERMXOR:       "vpermxor",
+	VPKSDSS:        "vpksdss",
+	VPKSDUS:        "vpksdus",
+	VPKUDUM:        "vpkudum",
+	VPKUDUS:        "vpkudus",
+	VPMSUMB:        "vpmsumb",
+	VPMSUMD:        "vpmsumd",
+	VPMSUMH:        "vpmsumh",
+	VPMSUMW:        "vpmsumw",
+	VPOPCNTB:       "vpopcntb",
+	VPOPCNTD:       "vpopcntd",
+	VPOPCNTH:       "vpopcnth",
+	VPOPCNTW:       "vpopcntw",
+	VRLD:           "vrld",
+	VSBOX:          "vsbox",
+	VSHASIGMAD:     "vshasigmad",
+	VSHASIGMAW:     "vshasigmaw",
+	VSLD:           "vsld",
+	VSRAD:          "vsrad",
+	VSRD:           "vsrd",
+	VSUBCUQ:        "vsubcuq",
+	VSUBECUQ:       "vsubecuq",
+	VSUBEUQM:       "vsubeuqm",
+	VSUBUDM:        "vsubudm",
+	VSUBUQM:        "vsubuqm",
+	VUPKHSW:        "vupkhsw",
+	VUPKLSW:        "vupklsw",
+	XSADDSP:        "xsaddsp",
+	XSCVDPSPN:      "xscvdpspn",
+	XSCVSPDPN:      "xscvspdpn",
+	XSCVSXDSP:      "xscvsxdsp",
+	XSCVUXDSP:      "xscvuxdsp",
+	XSDIVSP:        "xsdivsp",
+	XSMADDASP:      "xsmaddasp",
+	XSMADDMSP:      "xsmaddmsp",
+	XSMSUBASP:      "xsmsubasp",
+	XSMSUBMSP:      "xsmsubmsp",
+	XSMULSP:        "xsmulsp",
+	XSNMADDASP:     "xsnmaddasp",
+	XSNMADDMSP:     "xsnmaddmsp",
+	XSNMSUBASP:     "xsnmsubasp",
+	XSNMSUBMSP:     "xsnmsubmsp",
+	XSRESP:         "xsresp",
+	XSRSP:          "xsrsp",
+	XSRSQRTESP:     "xsrsqrtesp",
+	XSSQRTSP:       "xssqrtsp",
+	XSSUBSP:        "xssubsp",
+	XXLEQV:         "xxleqv",
+	XXLNAND:        "xxlnand",
+	XXLORC:         "xxlorc",
+	ADDG6S:         "addg6s",
+	BPERMD:         "bpermd",
+	CBCDTD:         "cbcdtd",
+	CDTBCD:         "cdtbcd",
+	DCFFIX:         "dcffix",
+	DCFFIXCC:       "dcffix.",
+	DIVDE:          "divde",
+	DIVDECC:        "divde.",
+	DIVDEO:         "divdeo",
+	DIVDEOCC:       "divdeo.",
+	DIVDEU:         "divdeu",
+	DIVDEUCC:       "divdeu.",
+	DIVDEUO:        "divdeuo",
+	DIVDEUOCC:      "divdeuo.",
+	DIVWE:          "divwe",
+	DIVWECC:        "divwe.",
+	DIVWEO:         "divweo",
+	DIVWEOCC:       "divweo.",
+	DIVWEU:         "divweu",
+	DIVWEUCC:       "divweu.",
+	DIVWEUO:        "divweuo",
+	DIVWEUOCC:      "divweuo.",
+	FCFIDS:         "fcfids",
+	FCFIDSCC:       "fcfids.",
+	FCFIDU:         "fcfidu",
+	FCFIDUCC:       "fcfidu.",
+	FCFIDUS:        "fcfidus",
+	FCFIDUSCC:      "fcfidus.",
+	FCTIDU:         "fctidu",
+	FCTIDUCC:       "fctidu.",
+	FCTIDUZ:        "fctiduz",
+	FCTIDUZCC:      "fctiduz.",
+	FCTIWU:         "fctiwu",
+	FCTIWUCC:       "fctiwu.",
+	FCTIWUZ:        "fctiwuz",
+	FCTIWUZCC:      "fctiwuz.",
+	FTDIV:          "ftdiv",
+	FTSQRT:         "ftsqrt",
+	LBARX:          "lbarx",
+	LDBRX:          "ldbrx",
+	LFIWZX:         "lfiwzx",
+	LHARX:          "lharx",
+	LXSDX:          "lxsdx",
+	LXVD2X:         "lxvd2x",
+	LXVDSX:         "lxvdsx",
+	LXVW4X:         "lxvw4x",
+	POPCNTD:        "popcntd",
+	POPCNTW:        "popcntw",
+	STBCXCC:        "stbcx.",
+	STDBRX:         "stdbrx",
+	STHCXCC:        "sthcx.",
+	STXSDX:         "stxsdx",
+	STXVD2X:        "stxvd2x",
+	STXVW4X:        "stxvw4x",
+	XSABSDP:        "xsabsdp",
+	XSADDDP:        "xsadddp",
+	XSCMPODP:       "xscmpodp",
+	XSCMPUDP:       "xscmpudp",
+	XSCPSGNDP:      "xscpsgndp",
+	XSCVDPSP:       "xscvdpsp",
+	XSCVDPSXDS:     "xscvdpsxds",
+	XSCVDPSXWS:     "xscvdpsxws",
+	XSCVDPUXDS:     "xscvdpuxds",
+	XSCVDPUXWS:     "xscvdpuxws",
+	XSCVSPDP:       "xscvspdp",
+	XSCVSXDDP:      "xscvsxddp",
+	XSCVUXDDP:      "xscvuxddp",
+	XSDIVDP:        "xsdivdp",
+	XSMADDADP:      "xsmaddadp",
+	XSMADDMDP:      "xsmaddmdp",
+	XSMAXDP:        "xsmaxdp",
+	XSMINDP:        "xsmindp",
+	XSMSUBADP:      "xsmsubadp",
+	XSMSUBMDP:      "xsmsubmdp",
+	XSMULDP:        "xsmuldp",
+	XSNABSDP:       "xsnabsdp",
+	XSNEGDP:        "xsnegdp",
+	XSNMADDADP:     "xsnmaddadp",
+	XSNMADDMDP:     "xsnmaddmdp",
+	XSNMSUBADP:     "xsnmsubadp",
+	XSNMSUBMDP:     "xsnmsubmdp",
+	XSRDPI:         "xsrdpi",
+	XSRDPIC:        "xsrdpic",
+	XSRDPIM:        "xsrdpim",
+	XSRDPIP:        "xsrdpip",
+	XSRDPIZ:        "xsrdpiz",
+	XSREDP:         "xsredp",
+	XSRSQRTEDP:     "xsrsqrtedp",
+	XSSQRTDP:       "xssqrtdp",
+	XSSUBDP:        "xssubdp",
+	XSTDIVDP:       "xstdivdp",
+	XSTSQRTDP:      "xstsqrtdp",
+	XVABSDP:        "xvabsdp",
+	XVABSSP:        "xvabssp",
+	XVADDDP:        "xvadddp",
+	XVADDSP:        "xvaddsp",
+	XVCMPEQDP:      "xvcmpeqdp",
+	XVCMPEQDPCC:    "xvcmpeqdp.",
+	XVCMPEQSP:      "xvcmpeqsp",
+	XVCMPEQSPCC:    "xvcmpeqsp.",
+	XVCMPGEDP:      "xvcmpgedp",
+	XVCMPGEDPCC:    "xvcmpgedp.",
+	XVCMPGESP:      "xvcmpgesp",
+	XVCMPGESPCC:    "xvcmpgesp.",
+	XVCMPGTDP:      "xvcmpgtdp",
+	XVCMPGTDPCC:    "xvcmpgtdp.",
+	XVCMPGTSP:      "xvcmpgtsp",
+	XVCMPGTSPCC:    "xvcmpgtsp.",
+	XVCPSGNDP:      "xvcpsgndp",
+	XVCPSGNSP:      "xvcpsgnsp",
+	XVCVDPSP:       "xvcvdpsp",
+	XVCVDPSXDS:     "xvcvdpsxds",
+	XVCVDPSXWS:     "xvcvdpsxws",
+	XVCVDPUXDS:     "xvcvdpuxds",
+	XVCVDPUXWS:     "xvcvdpuxws",
+	XVCVSPDP:       "xvcvspdp",
+	XVCVSPSXDS:     "xvcvspsxds",
+	XVCVSPSXWS:     "xvcvspsxws",
+	XVCVSPUXDS:     "xvcvspuxds",
+	XVCVSPUXWS:     "xvcvspuxws",
+	XVCVSXDDP:      "xvcvsxddp",
+	XVCVSXDSP:      "xvcvsxdsp",
+	XVCVSXWDP:      "xvcvsxwdp",
+	XVCVSXWSP:      "xvcvsxwsp",
+	XVCVUXDDP:      "xvcvuxddp",
+	XVCVUXDSP:      "xvcvuxdsp",
+	XVCVUXWDP:      "xvcvuxwdp",
+	XVCVUXWSP:      "xvcvuxwsp",
+	XVDIVDP:        "xvdivdp",
+	XVDIVSP:        "xvdivsp",
+	XVMADDADP:      "xvmaddadp",
+	XVMADDASP:      "xvmaddasp",
+	XVMADDMDP:      "xvmaddmdp",
+	XVMADDMSP:      "xvmaddmsp",
+	XVMAXDP:        "xvmaxdp",
+	XVMAXSP:        "xvmaxsp",
+	XVMINDP:        "xvmindp",
+	XVMINSP:        "xvminsp",
+	XVMSUBADP:      "xvmsubadp",
+	XVMSUBASP:      "xvmsubasp",
+	XVMSUBMDP:      "xvmsubmdp",
+	XVMSUBMSP:      "xvmsubmsp",
+	XVMULDP:        "xvmuldp",
+	XVMULSP:        "xvmulsp",
+	XVNABSDP:       "xvnabsdp",
+	XVNABSSP:       "xvnabssp",
+	XVNEGDP:        "xvnegdp",
+	XVNEGSP:        "xvnegsp",
+	XVNMADDADP:     "xvnmaddadp",
+	XVNMADDASP:     "xvnmaddasp",
+	XVNMADDMDP:     "xvnmaddmdp",
+	XVNMADDMSP:     "xvnmaddmsp",
+	XVNMSUBADP:     "xvnmsubadp",
+	XVNMSUBASP:     "xvnmsubasp",
+	XVNMSUBMDP:     "xvnmsubmdp",
+	XVNMSUBMSP:     "xvnmsubmsp",
+	XVRDPI:         "xvrdpi",
+	XVRDPIC:        "xvrdpic",
+	XVRDPIM:        "xvrdpim",
+	XVRDPIP:        "xvrdpip",
+	XVRDPIZ:        "xvrdpiz",
+	XVREDP:         "xvredp",
+	XVRESP:         "xvresp",
+	XVRSPI:         "xvrspi",
+	XVRSPIC:        "xvrspic",
+	XVRSPIM:        "xvrspim",
+	XVRSPIP:        "xvrspip",
+	XVRSPIZ:        "xvrspiz",
+	XVRSQRTEDP:     "xvrsqrtedp",
+	XVRSQRTESP:     "xvrsqrtesp",
+	XVSQRTDP:       "xvsqrtdp",
+	XVSQRTSP:       "xvsqrtsp",
+	XVSUBDP:        "xvsubdp",
+	XVSUBSP:        "xvsubsp",
+	XVTDIVDP:       "xvtdivdp",
+	XVTDIVSP:       "xvtdivsp",
+	XVTSQRTDP:      "xvtsqrtdp",
+	XVTSQRTSP:      "xvtsqrtsp",
+	XXLAND:         "xxland",
+	XXLANDC:        "xxlandc",
+	XXLNOR:         "xxlnor",
+	XXLOR:          "xxlor",
+	XXLXOR:         "xxlxor",
+	XXMRGHW:        "xxmrghw",
+	XXMRGLW:        "xxmrglw",
+	XXPERMDI:       "xxpermdi",
+	XXSEL:          "xxsel",
+	XXSLDWI:        "xxsldwi",
+	XXSPLTW:        "xxspltw",
+	CMPB:           "cmpb",
+	DADD:           "dadd",
+	DADDCC:         "dadd.",
+	DADDQ:          "daddq",
+	DADDQCC:        "daddq.",
+	DCFFIXQ:        "dcffixq",
+	DCFFIXQCC:      "dcffixq.",
+	DCMPO:          "dcmpo",
+	DCMPOQ:         "dcmpoq",
+	DCMPU:          "dcmpu",
+	DCMPUQ:         "dcmpuq",
+	DCTDP:          "dctdp",
+	DCTDPCC:        "dctdp.",
+	DCTFIX:         "dctfix",
+	DCTFIXCC:       "dctfix.",
+	DCTFIXQ:        "dctfixq",
+	DCTFIXQCC:      "dctfixq.",
+	DCTQPQ:         "dctqpq",
+	DCTQPQCC:       "dctqpq.",
+	DDEDPD:         "ddedpd",
+	DDEDPDCC:       "ddedpd.",
+	DDEDPDQ:        "ddedpdq",
+	DDEDPDQCC:      "ddedpdq.",
+	DDIV:           "ddiv",
+	DDIVCC:         "ddiv.",
+	DDIVQ:          "ddivq",
+	DDIVQCC:        "ddivq.",
+	DENBCD:         "denbcd",
+	DENBCDCC:       "denbcd.",
+	DENBCDQ:        "denbcdq",
+	DENBCDQCC:      "denbcdq.",
+	DIEX:           "diex",
+	DIEXCC:         "diex.",
+	DIEXQCC:        "diexq.",
+	DIEXQ:          "diexq",
+	DMUL:           "dmul",
+	DMULCC:         "dmul.",
+	DMULQ:          "dmulq",
+	DMULQCC:        "dmulq.",
+	DQUA:           "dqua",
+	DQUACC:         "dqua.",
+	DQUAI:          "dquai",
+	DQUAICC:        "dquai.",
+	DQUAIQ:         "dquaiq",
+	DQUAIQCC:       "dquaiq.",
+	DQUAQ:          "dquaq",
+	DQUAQCC:        "dquaq.",
+	DRDPQ:          "drdpq",
+	DRDPQCC:        "drdpq.",
+	DRINTN:         "drintn",
+	DRINTNCC:       "drintn.",
+	DRINTNQ:        "drintnq",
+	DRINTNQCC:      "drintnq.",
+	DRINTX:         "drintx",
+	DRINTXCC:       "drintx.",
+	DRINTXQ:        "drintxq",
+	DRINTXQCC:      "drintxq.",
+	DRRND:          "drrnd",
+	DRRNDCC:        "drrnd.",
+	DRRNDQ:         "drrndq",
+	DRRNDQCC:       "drrndq.",
+	DRSP:           "drsp",
+	DRSPCC:         "drsp.",
+	DSCLI:          "dscli",
+	DSCLICC:        "dscli.",
+	DSCLIQ:         "dscliq",
+	DSCLIQCC:       "dscliq.",
+	DSCRI:          "dscri",
+	DSCRICC:        "dscri.",
+	DSCRIQ:         "dscriq",
+	DSCRIQCC:       "dscriq.",
+	DSUB:           "dsub",
+	DSUBCC:         "dsub.",
+	DSUBQ:          "dsubq",
+	DSUBQCC:        "dsubq.",
+	DTSTDC:         "dtstdc",
+	DTSTDCQ:        "dtstdcq",
+	DTSTDG:         "dtstdg",
+	DTSTDGQ:        "dtstdgq",
+	DTSTEX:         "dtstex",
+	DTSTEXQ:        "dtstexq",
+	DTSTSF:         "dtstsf",
+	DTSTSFQ:        "dtstsfq",
+	DXEX:           "dxex",
+	DXEXCC:         "dxex.",
+	DXEXQ:          "dxexq",
+	DXEXQCC:        "dxexq.",
+	FCPSGN:         "fcpsgn",
+	FCPSGNCC:       "fcpsgn.",
+	LBZCIX:         "lbzcix",
+	LDCIX:          "ldcix",
+	LFDP:           "lfdp",
+	LFDPX:          "lfdpx",
+	LFIWAX:         "lfiwax",
+	LHZCIX:         "lhzcix",
+	LWZCIX:         "lwzcix",
+	PRTYD:          "prtyd",
+	PRTYW:          "prtyw",
+	SLBFEECC:       "slbfee.",
+	STBCIX:         "stbcix",
+	STDCIX:         "stdcix",
+	STFDP:          "stfdp",
+	STFDPX:         "stfdpx",
+	STHCIX:         "sthcix",
+	STWCIX:         "stwcix",
+	ISEL:           "isel",
+	LVEBX:          "lvebx",
+	LVEHX:          "lvehx",
+	LVEWX:          "lvewx",
+	LVSL:           "lvsl",
+	LVSR:           "lvsr",
+	LVX:            "lvx",
+	LVXL:           "lvxl",
+	MFVSCR:         "mfvscr",
+	MTVSCR:         "mtvscr",
+	STVEBX:         "stvebx",
+	STVEHX:         "stvehx",
+	STVEWX:         "stvewx",
+	STVX:           "stvx",
+	STVXL:          "stvxl",
+	TLBIEL:         "tlbiel",
+	VADDCUW:        "vaddcuw",
+	VADDFP:         "vaddfp",
+	VADDSBS:        "vaddsbs",
+	VADDSHS:        "vaddshs",
+	VADDSWS:        "vaddsws",
+	VADDUBM:        "vaddubm",
+	VADDUBS:        "vaddubs",
+	VADDUHM:        "vadduhm",
+	VADDUHS:        "vadduhs",
+	VADDUWM:        "vadduwm",
+	VADDUWS:        "vadduws",
+	VAND:           "vand",
+	VANDC:          "vandc",
+	VAVGSB:         "vavgsb",
+	VAVGSH:         "vavgsh",
+	VAVGSW:         "vavgsw",
+	VAVGUB:         "vavgub",
+	VAVGUH:         "vavguh",
+	VAVGUW:         "vavguw",
+	VCFSX:          "vcfsx",
+	VCFUX:          "vcfux",
+	VCMPBFP:        "vcmpbfp",
+	VCMPBFPCC:      "vcmpbfp.",
+	VCMPEQFP:       "vcmpeqfp",
+	VCMPEQFPCC:     "vcmpeqfp.",
+	VCMPEQUB:       "vcmpequb",
+	VCMPEQUBCC:     "vcmpequb.",
+	VCMPEQUH:       "vcmpequh",
+	VCMPEQUHCC:     "vcmpequh.",
+	VCMPEQUW:       "vcmpequw",
+	VCMPEQUWCC:     "vcmpequw.",
+	VCMPGEFP:       "vcmpgefp",
+	VCMPGEFPCC:     "vcmpgefp.",
+	VCMPGTFP:       "vcmpgtfp",
+	VCMPGTFPCC:     "vcmpgtfp.",
+	VCMPGTSB:       "vcmpgtsb",
+	VCMPGTSBCC:     "vcmpgtsb.",
+	VCMPGTSH:       "vcmpgtsh",
+	VCMPGTSHCC:     "vcmpgtsh.",
+	VCMPGTSW:       "vcmpgtsw",
+	VCMPGTSWCC:     "vcmpgtsw.",
+	VCMPGTUB:       "vcmpgtub",
+	VCMPGTUBCC:     "vcmpgtub.",
+	VCMPGTUH:       "vcmpgtuh",
+	VCMPGTUHCC:     "vcmpgtuh.",
+	VCMPGTUW:       "vcmpgtuw",
+	VCMPGTUWCC:     "vcmpgtuw.",
+	VCTSXS:         "vctsxs",
+	VCTUXS:         "vctuxs",
+	VEXPTEFP:       "vexptefp",
+	VLOGEFP:        "vlogefp",
+	VMADDFP:        "vmaddfp",
+	VMAXFP:         "vmaxfp",
+	VMAXSB:         "vmaxsb",
+	VMAXSH:         "vmaxsh",
+	VMAXSW:         "vmaxsw",
+	VMAXUB:         "vmaxub",
+	VMAXUH:         "vmaxuh",
+	VMAXUW:         "vmaxuw",
+	VMHADDSHS:      "vmhaddshs",
+	VMHRADDSHS:     "vmhraddshs",
+	VMINFP:         "vminfp",
+	VMINSB:         "vminsb",
+	VMINSH:         "vminsh",
+	VMINSW:         "vminsw",
+	VMINUB:         "vminub",
+	VMINUH:         "vminuh",
+	VMINUW:         "vminuw",
+	VMLADDUHM:      "vmladduhm",
+	VMRGHB:         "vmrghb",
+	VMRGHH:         "vmrghh",
+	VMRGHW:         "vmrghw",
+	VMRGLB:         "vmrglb",
+	VMRGLH:         "vmrglh",
+	VMRGLW:         "vmrglw",
+	VMSUMMBM:       "vmsummbm",
+	VMSUMSHM:       "vmsumshm",
+	VMSUMSHS:       "vmsumshs",
+	VMSUMUBM:       "vmsumubm",
+	VMSUMUHM:       "vmsumuhm",
+	VMSUMUHS:       "vmsumuhs",
+	VMULESB:        "vmulesb",
+	VMULESH:        "vmulesh",
+	VMULEUB:        "vmuleub",
+	VMULEUH:        "vmuleuh",
+	VMULOSB:        "vmulosb",
+	VMULOSH:        "vmulosh",
+	VMULOUB:        "vmuloub",
+	VMULOUH:        "vmulouh",
+	VNMSUBFP:       "vnmsubfp",
+	VNOR:           "vnor",
+	VOR:            "vor",
+	VPERM:          "vperm",
+	VPKPX:          "vpkpx",
+	VPKSHSS:        "vpkshss",
+	VPKSHUS:        "vpkshus",
+	VPKSWSS:        "vpkswss",
+	VPKSWUS:        "vpkswus",
+	VPKUHUM:        "vpkuhum",
+	VPKUHUS:        "vpkuhus",
+	VPKUWUM:        "vpkuwum",
+	VPKUWUS:        "vpkuwus",
+	VREFP:          "vrefp",
+	VRFIM:          "vrfim",
+	VRFIN:          "vrfin",
+	VRFIP:          "vrfip",
+	VRFIZ:          "vrfiz",
+	VRLB:           "vrlb",
+	VRLH:           "vrlh",
+	VRLW:           "vrlw",
+	VRSQRTEFP:      "vrsqrtefp",
+	VSEL:           "vsel",
+	VSL:            "vsl",
+	VSLB:           "vslb",
+	VSLDOI:         "vsldoi",
+	VSLH:           "vslh",
+	VSLO:           "vslo",
+	VSLW:           "vslw",
+	VSPLTB:         "vspltb",
+	VSPLTH:         "vsplth",
+	VSPLTISB:       "vspltisb",
+	VSPLTISH:       "vspltish",
+	VSPLTISW:       "vspltisw",
+	VSPLTW:         "vspltw",
+	VSR:            "vsr",
+	VSRAB:          "vsrab",
+	VSRAH:          "vsrah",
+	VSRAW:          "vsraw",
+	VSRB:           "vsrb",
+	VSRH:           "vsrh",
+	VSRO:           "vsro",
+	VSRW:           "vsrw",
+	VSUBCUW:        "vsubcuw",
+	VSUBFP:         "vsubfp",
+	VSUBSBS:        "vsubsbs",
+	VSUBSHS:        "vsubshs",
+	VSUBSWS:        "vsubsws",
+	VSUBUBM:        "vsububm",
+	VSUBUBS:        "vsububs",
+	VSUBUHM:        "vsubuhm",
+	VSUBUHS:        "vsubuhs",
+	VSUBUWM:        "vsubuwm",
+	VSUBUWS:        "vsubuws",
+	VSUM2SWS:       "vsum2sws",
+	VSUM4SBS:       "vsum4sbs",
+	VSUM4SHS:       "vsum4shs",
+	VSUM4UBS:       "vsum4ubs",
+	VSUMSWS:        "vsumsws",
+	VUPKHPX:        "vupkhpx",
+	VUPKHSB:        "vupkhsb",
+	VUPKHSH:        "vupkhsh",
+	VUPKLPX:        "vupklpx",
+	VUPKLSB:        "vupklsb",
+	VUPKLSH:        "vupklsh",
+	VXOR:           "vxor",
+	FRE:            "fre",
+	FRECC:          "fre.",
+	FRIM:           "frim",
+	FRIMCC:         "frim.",
+	FRIN:           "frin",
+	FRINCC:         "frin.",
+	FRIP:           "frip",
+	FRIPCC:         "frip.",
+	FRIZ:           "friz",
+	FRIZCC:         "friz.",
+	FRSQRTES:       "frsqrtes",
+	FRSQRTESCC:     "frsqrtes.",
+	HRFID:          "hrfid",
+	POPCNTB:        "popcntb",
+	MFOCRF:         "mfocrf",
+	MTOCRF:         "mtocrf",
+	SLBMFEE:        "slbmfee",
+	SLBMFEV:        "slbmfev",
+	SLBMTE:         "slbmte",
+	RFSCV:          "rfscv",
+	SCV:            "scv",
+	LQ:             "lq",
+	STQ:            "stq",
+	CNTLZD:         "cntlzd",
+	CNTLZDCC:       "cntlzd.",
+	DCBF:           "dcbf",
+	DCBST:          "dcbst",
+	DCBT:           "dcbt",
+	DCBTST:         "dcbtst",
+	DIVD:           "divd",
+	DIVDCC:         "divd.",
+	DIVDO:          "divdo",
+	DIVDOCC:        "divdo.",
+	DIVDU:          "divdu",
+	DIVDUCC:        "divdu.",
+	DIVDUO:         "divduo",
+	DIVDUOCC:       "divduo.",
+	DIVW:           "divw",
+	DIVWCC:         "divw.",
+	DIVWO:          "divwo",
+	DIVWOCC:        "divwo.",
+	DIVWU:          "divwu",
+	DIVWUCC:        "divwu.",
+	DIVWUO:         "divwuo",
+	DIVWUOCC:       "divwuo.",
+	EIEIO:          "eieio",
+	EXTSB:          "extsb",
+	EXTSBCC:        "extsb.",
+	EXTSW:          "extsw",
+	EXTSWCC:        "extsw.",
+	FADDS:          "fadds",
+	FADDSCC:        "fadds.",
+	FCFID:          "fcfid",
+	FCFIDCC:        "fcfid.",
+	FCTID:          "fctid",
+	FCTIDCC:        "fctid.",
+	FCTIDZ:         "fctidz",
+	FCTIDZCC:       "fctidz.",
+	FDIVS:          "fdivs",
+	FDIVSCC:        "fdivs.",
+	FMADDS:         "fmadds",
+	FMADDSCC:       "fmadds.",
+	FMSUBS:         "fmsubs",
+	FMSUBSCC:       "fmsubs.",
+	FMULS:          "fmuls",
+	FMULSCC:        "fmuls.",
+	FNMADDS:        "fnmadds",
+	FNMADDSCC:      "fnmadds.",
+	FNMSUBS:        "fnmsubs",
+	FNMSUBSCC:      "fnmsubs.",
+	FRES:           "fres",
+	FRESCC:         "fres.",
+	FRSQRTE:        "frsqrte",
+	FRSQRTECC:      "frsqrte.",
+	FSEL:           "fsel",
+	FSELCC:         "fsel.",
+	FSQRTS:         "fsqrts",
+	FSQRTSCC:       "fsqrts.",
+	FSUBS:          "fsubs",
+	FSUBSCC:        "fsubs.",
+	ICBI:           "icbi",
+	LD:             "ld",
+	LDARX:          "ldarx",
+	LDU:            "ldu",
+	LDUX:           "ldux",
+	LDX:            "ldx",
+	LWA:            "lwa",
+	LWARX:          "lwarx",
+	LWAUX:          "lwaux",
+	LWAX:           "lwax",
+	MFTB:           "mftb",
+	MTMSRD:         "mtmsrd",
+	MULHD:          "mulhd",
+	MULHDCC:        "mulhd.",
+	MULHDU:         "mulhdu",
+	MULHDUCC:       "mulhdu.",
+	MULHW:          "mulhw",
+	MULHWCC:        "mulhw.",
+	MULHWU:         "mulhwu",
+	MULHWUCC:       "mulhwu.",
+	MULLD:          "mulld",
+	MULLDCC:        "mulld.",
+	MULLDO:         "mulldo",
+	MULLDOCC:       "mulldo.",
+	RFID:           "rfid",
+	RLDCL:          "rldcl",
+	RLDCLCC:        "rldcl.",
+	RLDCR:          "rldcr",
+	RLDCRCC:        "rldcr.",
+	RLDIC:          "rldic",
+	RLDICCC:        "rldic.",
+	RLDICL:         "rldicl",
+	RLDICLCC:       "rldicl.",
+	RLDICR:         "rldicr",
+	RLDICRCC:       "rldicr.",
+	RLDIMI:         "rldimi",
+	RLDIMICC:       "rldimi.",
+	SC:             "sc",
+	SLBIA:          "slbia",
+	SLBIE:          "slbie",
+	SLD:            "sld",
+	SLDCC:          "sld.",
+	SRAD:           "srad",
+	SRADCC:         "srad.",
+	SRADI:          "sradi",
+	SRADICC:        "sradi.",
+	SRD:            "srd",
+	SRDCC:          "srd.",
+	STD:            "std",
+	STDCXCC:        "stdcx.",
+	STDU:           "stdu",
+	STDUX:          "stdux",
+	STDX:           "stdx",
+	STFIWX:         "stfiwx",
+	STWCXCC:        "stwcx.",
+	SUBF:           "subf",
+	SUBFCC:         "subf.",
+	SUBFO:          "subfo",
+	SUBFOCC:        "subfo.",
+	TD:             "td",
+	TDI:            "tdi",
+	TLBSYNC:        "tlbsync",
+	FCTIW:          "fctiw",
+	FCTIWCC:        "fctiw.",
+	FCTIWZ:         "fctiwz",
+	FCTIWZCC:       "fctiwz.",
+	FSQRT:          "fsqrt",
+	FSQRTCC:        "fsqrt.",
+	ADD:            "add",
+	ADDCC:          "add.",
+	ADDO:           "addo",
+	ADDOCC:         "addo.",
+	ADDC:           "addc",
+	ADDCCC:         "addc.",
+	ADDCO:          "addco",
+	ADDCOCC:        "addco.",
+	ADDE:           "adde",
+	ADDECC:         "adde.",
+	ADDEO:          "addeo",
+	ADDEOCC:        "addeo.",
+	LI:             "li",
+	ADDI:           "addi",
+	ADDIC:          "addic",
+	ADDICCC:        "addic.",
+	LIS:            "lis",
+	ADDIS:          "addis",
+	ADDME:          "addme",
+	ADDMECC:        "addme.",
+	ADDMEO:         "addmeo",
+	ADDMEOCC:       "addmeo.",
+	ADDZE:          "addze",
+	ADDZECC:        "addze.",
+	ADDZEO:         "addzeo",
+	ADDZEOCC:       "addzeo.",
+	AND:            "and",
+	ANDCC:          "and.",
+	ANDC:           "andc",
+	ANDCCC:         "andc.",
+	ANDICC:         "andi.",
+	ANDISCC:        "andis.",
+	B:              "b",
+	BA:             "ba",
+	BL:             "bl",
+	BLA:            "bla",
+	BC:             "bc",
+	BCA:            "bca",
+	BCL:            "bcl",
+	BCLA:           "bcla",
+	BCCTR:          "bcctr",
+	BCCTRL:         "bcctrl",
+	BCLR:           "bclr",
+	BCLRL:          "bclrl",
+	CMPW:           "cmpw",
+	CMPD:           "cmpd",
+	CMP:            "cmp",
+	CMPWI:          "cmpwi",
+	CMPDI:          "cmpdi",
+	CMPI:           "cmpi",
+	CMPLW:          "cmplw",
+	CMPLD:          "cmpld",
+	CMPL:           "cmpl",
+	CMPLWI:         "cmplwi",
+	CMPLDI:         "cmpldi",
+	CMPLI:          "cmpli",
+	CNTLZW:         "cntlzw",
+	CNTLZWCC:       "cntlzw.",
+	CRAND:          "crand",
+	CRANDC:         "crandc",
+	CREQV:          "creqv",
+	CRNAND:         "crnand",
+	CRNOR:          "crnor",
+	CROR:           "cror",
+	CRORC:          "crorc",
+	CRXOR:          "crxor",
+	DCBZ:           "dcbz",
+	EQV:            "eqv",
+	EQVCC:          "eqv.",
+	EXTSH:          "extsh",
+	EXTSHCC:        "extsh.",
+	FABS:           "fabs",
+	FABSCC:         "fabs.",
+	FADD:           "fadd",
+	FADDCC:         "fadd.",
+	FCMPO:          "fcmpo",
+	FCMPU:          "fcmpu",
+	FDIV:           "fdiv",
+	FDIVCC:         "fdiv.",
+	FMADD:          "fmadd",
+	FMADDCC:        "fmadd.",
+	FMR:            "fmr",
+	FMRCC:          "fmr.",
+	FMSUB:          "fmsub",
+	FMSUBCC:        "fmsub.",
+	FMUL:           "fmul",
+	FMULCC:         "fmul.",
+	FNABS:          "fnabs",
+	FNABSCC:        "fnabs.",
+	FNEG:           "fneg",
+	FNEGCC:         "fneg.",
+	FNMADD:         "fnmadd",
+	FNMADDCC:       "fnmadd.",
+	FNMSUB:         "fnmsub",
+	FNMSUBCC:       "fnmsub.",
+	FRSP:           "frsp",
+	FRSPCC:         "frsp.",
+	FSUB:           "fsub",
+	FSUBCC:         "fsub.",
+	ISYNC:          "isync",
+	LBZ:            "lbz",
+	LBZU:           "lbzu",
+	LBZUX:          "lbzux",
+	LBZX:           "lbzx",
+	LFD:            "lfd",
+	LFDU:           "lfdu",
+	LFDUX:          "lfdux",
+	LFDX:           "lfdx",
+	LFS:            "lfs",
+	LFSU:           "lfsu",
+	LFSUX:          "lfsux",
+	LFSX:           "lfsx",
+	LHA:            "lha",
+	LHAU:           "lhau",
+	LHAUX:          "lhaux",
+	LHAX:           "lhax",
+	LHBRX:          "lhbrx",
+	LHZ:            "lhz",
+	LHZU:           "lhzu",
+	LHZUX:          "lhzux",
+	LHZX:           "lhzx",
+	LMW:            "lmw",
+	LSWI:           "lswi",
+	LSWX:           "lswx",
+	LWBRX:          "lwbrx",
+	LWZ:            "lwz",
+	LWZU:           "lwzu",
+	LWZUX:          "lwzux",
+	LWZX:           "lwzx",
+	MCRF:           "mcrf",
+	MCRFS:          "mcrfs",
+	MFCR:           "mfcr",
+	MFFS:           "mffs",
+	MFFSCC:         "mffs.",
+	MFMSR:          "mfmsr",
+	MFSPR:          "mfspr",
+	MTCRF:          "mtcrf",
+	MTFSB0:         "mtfsb0",
+	MTFSB0CC:       "mtfsb0.",
+	MTFSB1:         "mtfsb1",
+	MTFSB1CC:       "mtfsb1.",
+	MTFSF:          "mtfsf",
+	MTFSFCC:        "mtfsf.",
+	MTFSFI:         "mtfsfi",
+	MTFSFICC:       "mtfsfi.",
+	MTMSR:          "mtmsr",
+	MTSPR:          "mtspr",
+	MULLI:          "mulli",
+	MULLW:          "mullw",
+	MULLWCC:        "mullw.",
+	MULLWO:         "mullwo",
+	MULLWOCC:       "mullwo.",
+	NAND:           "nand",
+	NANDCC:         "nand.",
+	NEG:            "neg",
+	NEGCC:          "neg.",
+	NEGO:           "nego",
+	NEGOCC:         "nego.",
+	NOR:            "nor",
+	NORCC:          "nor.",
+	OR:             "or",
+	ORCC:           "or.",
+	ORC:            "orc",
+	ORCCC:          "orc.",
+	NOP:            "nop",
+	ORI:            "ori",
+	ORIS:           "oris",
+	RLWIMI:         "rlwimi",
+	RLWIMICC:       "rlwimi.",
+	RLWINM:         "rlwinm",
+	RLWINMCC:       "rlwinm.",
+	RLWNM:          "rlwnm",
+	RLWNMCC:        "rlwnm.",
+	SLW:            "slw",
+	SLWCC:          "slw.",
+	SRAW:           "sraw",
+	SRAWCC:         "sraw.",
+	SRAWI:          "srawi",
+	SRAWICC:        "srawi.",
+	SRW:            "srw",
+	SRWCC:          "srw.",
+	STB:            "stb",
+	STBU:           "stbu",
+	STBUX:          "stbux",
+	STBX:           "stbx",
+	STFD:           "stfd",
+	STFDU:          "stfdu",
+	STFDUX:         "stfdux",
+	STFDX:          "stfdx",
+	STFS:           "stfs",
+	STFSU:          "stfsu",
+	STFSUX:         "stfsux",
+	STFSX:          "stfsx",
+	STH:            "sth",
+	STHBRX:         "sthbrx",
+	STHU:           "sthu",
+	STHUX:          "sthux",
+	STHX:           "sthx",
+	STMW:           "stmw",
+	STSWI:          "stswi",
+	STSWX:          "stswx",
+	STW:            "stw",
+	STWBRX:         "stwbrx",
+	STWU:           "stwu",
+	STWUX:          "stwux",
+	STWX:           "stwx",
+	SUBFC:          "subfc",
+	SUBFCCC:        "subfc.",
+	SUBFCO:         "subfco",
+	SUBFCOCC:       "subfco.",
+	SUBFE:          "subfe",
+	SUBFECC:        "subfe.",
+	SUBFEO:         "subfeo",
+	SUBFEOCC:       "subfeo.",
+	SUBFIC:         "subfic",
+	SUBFME:         "subfme",
+	SUBFMECC:       "subfme.",
+	SUBFMEO:        "subfmeo",
+	SUBFMEOCC:      "subfmeo.",
+	SUBFZE:         "subfze",
+	SUBFZECC:       "subfze.",
+	SUBFZEO:        "subfzeo",
+	SUBFZEOCC:      "subfzeo.",
+	SYNC:           "sync",
+	TLBIE:          "tlbie",
+	TW:             "tw",
+	TWI:            "twi",
+	XOR:            "xor",
+	XORCC:          "xor.",
+	XORI:           "xori",
+	XORIS:          "xoris",
 }
 
 var (
-	ap_Reg_11_15                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_Reg_6_10                      = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{6, 5}}}
-	ap_Reg_16_20                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{16, 5}}}
-	ap_FPReg_6_10                    = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5}}}
-	ap_VecReg_16_20                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{16, 5}}}
-	ap_VecReg_6_10                   = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{6, 5}}}
-	ap_FPReg_16_20                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{16, 5}}}
-	ap_VecSReg_31_31_6_10            = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1}, {6, 5}}}
-	ap_ImmUnsigned_16_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 5}}}
-	ap_VecSpReg_10_10_6_9            = &argField{Type: TypeVecSpReg, Shift: 0, BitFields: BitFields{{10, 1}, {6, 4}}}
-	ap_Offset_16_27_shift4           = &argField{Type: TypeOffset, Shift: 4, BitFields: BitFields{{16, 12}}}
-	ap_ImmUnsigned_16_25_11_15_31_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 10}, {11, 5}, {31, 1}}}
-	ap_CondRegBit_11_15              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_VecReg_11_15                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_CondRegField_6_8              = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{6, 3}}}
-	ap_ImmUnsigned_15_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{15, 1}}}
-	ap_Reg_21_25                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{21, 5}}}
-	ap_ImmUnsigned_13_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 3}}}
-	ap_ImmUnsigned_12_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 4}}}
-	ap_VecReg_21_25                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{21, 5}}}
-	ap_ImmUnsigned_23_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{23, 3}}}
-	ap_MMAReg_6_8                    = &argField{Type: TypeMMAReg, Shift: 0, BitFields: BitFields{{6, 3}}}
-	ap_VecSReg_29_29_11_15           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1}, {11, 5}}}
-	ap_VecSReg_30_30_16_20           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1}, {16, 5}}}
-	ap_ImmUnsigned_11_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_ImmUnsigned_21_22             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 2}}}
-	ap_ImmUnsigned_18_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{18, 3}}}
-	ap_ImmUnsigned_19_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{19, 2}}}
-	ap_ImmSigned_16_25_11_15_31_31   = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 10}, {11, 5}, {31, 1}}}
-	ap_ImmUnsigned_22_22             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 1}}}
-	ap_ImmUnsigned_10_10             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 1}}}
-	ap_ImmUnsigned_14_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 2}}}
-	ap_ImmUnsigned_10_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 6}}}
-	ap_ImmUnsigned_30_30_16_20       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{30, 1}, {16, 5}}}
-	ap_Offset_16_29_shift2           = &argField{Type: TypeOffset, Shift: 2, BitFields: BitFields{{16, 14}}}
-	ap_VecSReg_28_28_6_10            = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1}, {6, 5}}}
-	ap_CondRegField_11_13            = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{11, 3}}}
-	ap_ImmUnsigned_9_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 2}}}
-	ap_ImmUnsigned_9_15              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 7}}}
-	ap_ImmUnsigned_25_25_29_29_11_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{25, 1}, {29, 1}, {11, 5}}}
-	ap_ImmUnsigned_13_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 8}}}
-	ap_ImmUnsigned_6_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 5}}}
-	ap_FPReg_11_15                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_ImmUnsigned_7_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 4}}}
-	ap_ImmUnsigned_31_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{31, 1}}}
-	ap_SpReg_11_20                   = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{11, 10}}}
-	ap_ImmUnsigned_20_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 1}}}
-	ap_ImmUnsigned_16_16             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 1}}}
-	ap_ImmUnsigned_17_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{17, 4}}}
-	ap_ImmUnsigned_22_23             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 2}}}
-	ap_VecSReg_28_28_21_25           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1}, {21, 5}}}
-	ap_ImmUnsigned_11_12             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 2}}}
-	ap_ImmUnsigned_11_11             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 1}}}
-	ap_ImmSigned_11_15               = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{11, 5}}}
-	ap_ImmUnsigned_16_21             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 6}}}
-	ap_CondRegBit_21_25              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{21, 5}}}
-	ap_ImmUnsigned_12_13             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 2}}}
-	ap_ImmUnsigned_14_14             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 1}}}
-	ap_ImmUnsigned_22_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 4}}}
-	ap_ImmUnsigned_12_19             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 8}}}
-	ap_ImmUnsigned_20_26             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 7}}}
-	ap_ImmUnsigned_8_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{8, 3}}}
-	ap_FPReg_21_25                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{21, 5}}}
-	ap_SpReg_16_20_11_15             = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{16, 5}, {11, 5}}}
-	ap_ImmUnsigned_26_26_21_25       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 1}, {21, 5}}}
-	ap_ImmSigned_16_31               = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 16}}}
-	ap_ImmUnsigned_16_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 16}}}
-	ap_PCRel_6_29_shift2             = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{6, 24}}}
-	ap_Label_6_29_shift2             = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{6, 24}}}
-	ap_PCRel_16_29_shift2            = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{16, 14}}}
-	ap_Label_16_29_shift2            = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{16, 14}}}
-	ap_CondRegBit_6_10               = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{6, 5}}}
-	ap_CondRegBit_16_20              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{16, 5}}}
-	ap_Offset_16_31                  = &argField{Type: TypeOffset, Shift: 0, BitFields: BitFields{{16, 16}}}
-	ap_ImmUnsigned_7_14              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 8}}}
-	ap_ImmUnsigned_6_6               = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 1}}}
-	ap_ImmUnsigned_6_8               = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 3}}}
-	ap_ImmUnsigned_16_19             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 4}}}
-	ap_ImmUnsigned_21_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 5}}}
-	ap_ImmUnsigned_26_30             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 5}}}
+	ap_Reg_11_15                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_Reg_6_10                      = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}}
+	ap_Reg_16_20                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}}
+	ap_FPReg_6_10                    = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}}
+	ap_VecReg_16_20                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}}
+	ap_VecReg_6_10                   = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{6, 5, 0}}}
+	ap_FPReg_16_20                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{16, 5, 0}}}
+	ap_VecSReg_31_31_6_10            = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1, 0}, {6, 5, 0}}}
+	ap_ImmUnsigned_16_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 5, 0}}}
+	ap_VecSpReg_10_10_6_9            = &argField{Type: TypeVecSpReg, Shift: 0, BitFields: BitFields{{10, 1, 0}, {6, 4, 0}}}
+	ap_Offset_16_27_shift4           = &argField{Type: TypeOffset, Shift: 4, BitFields: BitFields{{16, 12, 0}}}
+	ap_ImmUnsigned_16_25_11_15_31_31 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 10, 0}, {11, 5, 0}, {31, 1, 0}}}
+	ap_Reg_38_42                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}}
+	ap_Reg_43_47                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{11, 5, 1}}}
+	ap_ImmSigned_14_31_48_63         = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{14, 18, 0}, {16, 16, 1}}}
+	ap_ImmUnsigned_11_11             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 1, 0}}}
+	ap_Offset_14_31_48_63            = &argField{Type: TypeOffset, Shift: 0, BitFields: BitFields{{14, 18, 0}, {16, 16, 1}}}
+	ap_FPReg_38_42                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}}
+	ap_VecReg_38_42                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{6, 5, 1}}}
+	ap_VecSReg_37_37_38_42           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{5, 1, 1}, {6, 5, 1}}}
+	ap_VecSpReg_42_42_38_41          = &argField{Type: TypeVecSpReg, Shift: 0, BitFields: BitFields{{10, 1, 1}, {6, 4, 1}}}
+	ap_MMAReg_38_40                  = &argField{Type: TypeMMAReg, Shift: 0, BitFields: BitFields{{6, 3, 1}}}
+	ap_VecSReg_61_61_43_47           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1, 1}, {11, 5, 1}}}
+	ap_VecSReg_62_62_48_52           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1, 1}, {16, 5, 1}}}
+	ap_ImmUnsigned_24_27             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{24, 4, 0}}}
+	ap_ImmUnsigned_28_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{28, 4, 0}}}
+	ap_ImmUnsigned_16_17             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 2, 0}}}
+	ap_ImmUnsigned_28_29             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{28, 2, 0}}}
+	ap_ImmUnsigned_16_23             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 8, 0}}}
+	ap_ImmUnsigned_16_19             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 4, 0}}}
+	ap_CondRegBit_11_15              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_VecReg_11_15                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_CondRegField_6_8              = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{6, 3, 0}}}
+	ap_ImmUnsigned_15_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{15, 1, 0}}}
+	ap_Reg_21_25                     = &argField{Type: TypeReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}}
+	ap_ImmUnsigned_13_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 3, 0}}}
+	ap_ImmUnsigned_12_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 4, 0}}}
+	ap_VecReg_21_25                  = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}}
+	ap_ImmUnsigned_23_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{23, 3, 0}}}
+	ap_MMAReg_6_8                    = &argField{Type: TypeMMAReg, Shift: 0, BitFields: BitFields{{6, 3, 0}}}
+	ap_VecSReg_29_29_11_15           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1, 0}, {11, 5, 0}}}
+	ap_VecSReg_30_30_16_20           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1, 0}, {16, 5, 0}}}
+	ap_VecSReg_63_63_38_42           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1, 1}, {6, 5, 1}}}
+	ap_VecSReg_60_60_53_57           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 1}, {21, 5, 1}}}
+	ap_ImmUnsigned_24_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{24, 8, 0}}}
+	ap_ImmUnsigned_11_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_ImmUnsigned_29_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{29, 3, 0}}}
+	ap_VecSReg_47_47_38_42           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{15, 1, 1}, {6, 5, 1}}}
+	ap_ImmUnsigned_46_46             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 1, 1}}}
+	ap_ImmUnsigned_16_31_48_63       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 16, 0}, {16, 16, 1}}}
+	ap_ImmUnsigned_21_22             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 2, 0}}}
+	ap_ImmUnsigned_18_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{18, 3, 0}}}
+	ap_ImmUnsigned_19_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{19, 2, 0}}}
+	ap_ImmSigned_16_25_11_15_31_31   = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 10, 0}, {11, 5, 0}, {31, 1, 0}}}
+	ap_ImmUnsigned_22_22             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 1, 0}}}
+	ap_ImmUnsigned_10_10             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 1, 0}}}
+	ap_ImmUnsigned_14_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 2, 0}}}
+	ap_ImmUnsigned_10_15             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 6, 0}}}
+	ap_ImmUnsigned_30_30_16_20       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{30, 1, 0}, {16, 5, 0}}}
+	ap_Offset_16_29_shift2           = &argField{Type: TypeOffset, Shift: 2, BitFields: BitFields{{16, 14, 0}}}
+	ap_VecSReg_28_28_6_10            = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 0}, {6, 5, 0}}}
+	ap_CondRegField_11_13            = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{11, 3, 0}}}
+	ap_ImmUnsigned_9_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 2, 0}}}
+	ap_ImmUnsigned_9_15              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 7, 0}}}
+	ap_ImmUnsigned_25_25_29_29_11_15 = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{25, 1, 0}, {29, 1, 0}, {11, 5, 0}}}
+	ap_ImmUnsigned_13_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{13, 8, 0}}}
+	ap_ImmUnsigned_6_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 5, 0}}}
+	ap_FPReg_11_15                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_ImmUnsigned_7_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 4, 0}}}
+	ap_ImmUnsigned_31_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{31, 1, 0}}}
+	ap_SpReg_11_20                   = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{11, 10, 0}}}
+	ap_ImmUnsigned_20_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 1, 0}}}
+	ap_ImmUnsigned_16_16             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 1, 0}}}
+	ap_ImmUnsigned_17_20             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{17, 4, 0}}}
+	ap_ImmUnsigned_22_23             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 2, 0}}}
+	ap_VecSReg_28_28_21_25           = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1, 0}, {21, 5, 0}}}
+	ap_ImmUnsigned_11_12             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 2, 0}}}
+	ap_ImmSigned_11_15               = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{11, 5, 0}}}
+	ap_ImmUnsigned_16_21             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 6, 0}}}
+	ap_CondRegBit_21_25              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{21, 5, 0}}}
+	ap_ImmUnsigned_12_13             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 2, 0}}}
+	ap_ImmUnsigned_14_14             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{14, 1, 0}}}
+	ap_ImmUnsigned_22_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 4, 0}}}
+	ap_ImmUnsigned_12_19             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 8, 0}}}
+	ap_ImmUnsigned_20_26             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{20, 7, 0}}}
+	ap_ImmUnsigned_8_10              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{8, 3, 0}}}
+	ap_FPReg_21_25                   = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{21, 5, 0}}}
+	ap_SpReg_16_20_11_15             = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{16, 5, 0}, {11, 5, 0}}}
+	ap_ImmUnsigned_26_26_21_25       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 1, 0}, {21, 5, 0}}}
+	ap_ImmSigned_16_31               = &argField{Type: TypeImmSigned, Shift: 0, BitFields: BitFields{{16, 16, 0}}}
+	ap_ImmUnsigned_16_31             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{16, 16, 0}}}
+	ap_PCRel_6_29_shift2             = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{6, 24, 0}}}
+	ap_Label_6_29_shift2             = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{6, 24, 0}}}
+	ap_PCRel_16_29_shift2            = &argField{Type: TypePCRel, Shift: 2, BitFields: BitFields{{16, 14, 0}}}
+	ap_Label_16_29_shift2            = &argField{Type: TypeLabel, Shift: 2, BitFields: BitFields{{16, 14, 0}}}
+	ap_CondRegBit_6_10               = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{6, 5, 0}}}
+	ap_CondRegBit_16_20              = &argField{Type: TypeCondRegBit, Shift: 0, BitFields: BitFields{{16, 5, 0}}}
+	ap_Offset_16_31                  = &argField{Type: TypeOffset, Shift: 0, BitFields: BitFields{{16, 16, 0}}}
+	ap_ImmUnsigned_7_14              = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 8, 0}}}
+	ap_ImmUnsigned_6_6               = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 1, 0}}}
+	ap_ImmUnsigned_6_8               = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{6, 3, 0}}}
+	ap_ImmUnsigned_21_25             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 5, 0}}}
+	ap_ImmUnsigned_26_30             = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{26, 5, 0}}}
 )
 
 var instFormats = [...]instFormat{
 	{BRD, 0xfc0007fe, 0x7c000176, 0xf801, // Byte-Reverse Doubleword X-form (brd RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{BRH, 0xfc0007fe, 0x7c0001b6, 0xf801, // Byte-Reverse Halfword X-form (brh RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{BRW, 0xfc0007fe, 0x7c000136, 0xf801, // Byte-Reverse Word X-form (brw RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CFUGED, 0xfc0007fe, 0x7c0001b8, 0x1, // Centrifuge Doubleword X-form (cfuged RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{CNTLZDM, 0xfc0007fe, 0x7c000076, 0x1, // Count Leading Zeros Doubleword under bit Mask X-form (cntlzdm RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{CNTTZDM, 0xfc0007fe, 0x7c000476, 0x1, // Count Trailing Zeros Doubleword under bit Mask X-form (cnttzdm RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{DCFFIXQQ, 0xfc1f07fe, 0xfc0007c4, 0x1, // DFP Convert From Fixed Quadword Quad X-form (dcffixqq FRTp,VRB)
-		[5]*argField{ap_FPReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_VecReg_16_20}},
 	{DCTFIXQQ, 0xfc1f07fe, 0xfc0107c4, 0x1, // DFP Convert To Fixed Quadword Quad X-form (dctfixqq VRT,FRBp)
-		[5]*argField{ap_VecReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_FPReg_16_20}},
 	{LXVKQ, 0xfc1f07fe, 0xf01f02d0, 0x0, // Load VSX Vector Special Value Quadword X-form (lxvkq XT,UIM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_16_20}},
 	{LXVP, 0xfc00000f, 0x18000000, 0x0, // Load VSX Vector Paired DQ-form (lxvp XTp,DQ(RA))
-		[5]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}},
+		[6]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}},
 	{LXVPX, 0xfc0007fe, 0x7c00029a, 0x1, // Load VSX Vector Paired Indexed X-form (lxvpx XTp,RA,RB)
-		[5]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVRBX, 0xfc0007fe, 0x7c00001a, 0x0, // Load VSX Vector Rightmost Byte Indexed X-form (lxvrbx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVRDX, 0xfc0007fe, 0x7c0000da, 0x0, // Load VSX Vector Rightmost Doubleword Indexed X-form (lxvrdx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVRHX, 0xfc0007fe, 0x7c00005a, 0x0, // Load VSX Vector Rightmost Halfword Indexed X-form (lxvrhx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVRWX, 0xfc0007fe, 0x7c00009a, 0x0, // Load VSX Vector Rightmost Word Indexed X-form (lxvrwx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MTVSRBM, 0xfc1f07ff, 0x10100642, 0x0, // Move to VSR Byte Mask VX-form (mtvsrbm VRT,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
 	{MTVSRBMI, 0xfc00003e, 0x10000014, 0x0, // Move To VSR Byte Mask Immediate DX-form (mtvsrbmi VRT,bm)
-		[5]*argField{ap_VecReg_6_10, ap_ImmUnsigned_16_25_11_15_31_31}},
+		[6]*argField{ap_VecReg_6_10, ap_ImmUnsigned_16_25_11_15_31_31}},
 	{MTVSRDM, 0xfc1f07ff, 0x10130642, 0x0, // Move to VSR Doubleword Mask VX-form (mtvsrdm VRT,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
 	{MTVSRHM, 0xfc1f07ff, 0x10110642, 0x0, // Move to VSR Halfword Mask VX-form (mtvsrhm VRT,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
 	{MTVSRQM, 0xfc1f07ff, 0x10140642, 0x0, // Move to VSR Quadword Mask VX-form (mtvsrqm VRT,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
 	{MTVSRWM, 0xfc1f07ff, 0x10120642, 0x0, // Move to VSR Word Mask VX-form (mtvsrwm VRT,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20}},
+	{PADDI, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Add Immediate MLS:D-form (paddi RT,RA,SI,R)
+		[6]*argField{ap_Reg_38_42, ap_Reg_43_47, ap_ImmSigned_14_31_48_63, ap_ImmUnsigned_11_11}},
 	{PDEPD, 0xfc0007fe, 0x7c000138, 0x1, // Parallel Bits Deposit Doubleword X-form (pdepd RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{PEXTD, 0xfc0007fe, 0x7c000178, 0x1, // Parallel Bits Extract Doubleword X-form (pextd RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+	{PLBZ, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Byte and Zero MLS:D-form (plbz RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLD, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load Doubleword 8LS:D-form (pld RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLFD, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Floating-Point Double MLS:D-form (plfd FRT,D(RA),R)
+		[6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLFS, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Floating-Point Single MLS:D-form (plfs FRT,D(RA),R)
+		[6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLHA, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Halfword Algebraic MLS:D-form (plha RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLHZ, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Halfword and Zero MLS:D-form (plhz RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLQ, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load Quadword 8LS:D-form (plq RTp,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLWA, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load Word Algebraic 8LS:D-form (plwa RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLWZ, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Load Word and Zero MLS:D-form (plwz RT,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLXSD, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load VSX Scalar Doubleword 8LS:D-form (plxsd VRT,D(RA),R)
+		[6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLXSSP, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load VSX Scalar Single-Precision 8LS:D-form (plxssp VRT,D(RA),R)
+		[6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLXV, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load VSX Vector 8LS:D-form (plxv XT,D(RA),R)
+		[6]*argField{ap_VecSReg_37_37_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PLXVP, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Load VSX Vector Paired 8LS:D-form (plxvp XTp,D(RA),R)
+		[6]*argField{ap_VecSpReg_42_42_38_41, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PMXVBF16GER2, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) MMIRR:XX3-form (pmxvbf16ger2 AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVBF16GER2NN, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvbf16ger2nn AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVBF16GER2NP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvbf16ger2np AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVBF16GER2PN, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvbf16ger2pn AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVBF16GER2PP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvbf16ger2pp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF16GER2, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) MMIRR:XX3-form (pmxvf16ger2 AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF16GER2NN, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf16ger2nn AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF16GER2NP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf16ger2np AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF16GER2PN, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf16ger2pn AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF16GER2PP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf16ger2pp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVF32GER, 0xfff00000, 0x7900000, 0xfff00, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form (pmxvf32ger AT,XA,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}},
+	{PMXVF32GERNN, 0xfff00000, 0x7900000, 0xfff00, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf32gernn AT,XA,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}},
+	{PMXVF32GERNP, 0xfff00000, 0x7900000, 0xfff00, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf32gernp AT,XA,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}},
+	{PMXVF32GERPN, 0xfff00000, 0x7900000, 0xfff00, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf32gerpn AT,XA,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}},
+	{PMXVF32GERPP, 0xfff00000, 0x7900000, 0xfff00, // Prefixed Masked VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf32gerpp AT,XA,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31}},
+	{PMXVF64GER, 0xfff00000, 0x7900000, 0xfff03, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) MMIRR:XX3-form (pmxvf64ger AT,XAp,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}},
+	{PMXVF64GERNN, 0xfff00000, 0x7900000, 0xfff03, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate MMIRR:XX3-form (pmxvf64gernn AT,XAp,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}},
+	{PMXVF64GERNP, 0xfff00000, 0x7900000, 0xfff03, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate MMIRR:XX3-form (pmxvf64gernp AT,XAp,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}},
+	{PMXVF64GERPN, 0xfff00000, 0x7900000, 0xfff03, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate MMIRR:XX3-form (pmxvf64gerpn AT,XAp,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}},
+	{PMXVF64GERPP, 0xfff00000, 0x7900000, 0xfff03, // Prefixed Masked VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvf64gerpp AT,XAp,XB,XMSK,YMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_29}},
+	{PMXVI16GER2, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) MMIRR:XX3-form (pmxvi16ger2 AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVI16GER2PP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi16ger2pp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVI16GER2S, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation MMIRR:XX3-form (pmxvi16ger2s AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVI16GER2SPP, 0xfff00000, 0x7900000, 0xf3f00, // Prefixed Masked VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi16ger2spp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_17}},
+	{PMXVI4GER8, 0xfff00000, 0x7900000, 0xf0000, // Prefixed Masked VSX Vector 4-bit Signed Integer GER (rank-8 update) MMIRR:XX3-form (pmxvi4ger8 AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_23}},
+	{PMXVI4GER8PP, 0xfff00000, 0x7900000, 0xf0000, // Prefixed Masked VSX Vector 4-bit Signed Integer GER (rank-8 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi4ger8pp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_23}},
+	{PMXVI8GER4, 0xfff00000, 0x7900000, 0xf0f00, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) MMIRR:XX3-form (pmxvi8ger4 AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}},
+	{PMXVI8GER4PP, 0xfff00000, 0x7900000, 0xf0f00, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi8ger4pp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}},
+	{PMXVI8GER4SPP, 0xfff00000, 0x7900000, 0xf0f00, // Prefixed Masked VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) with Saturate Positive multiply, Positive accumulate MMIRR:XX3-form (pmxvi8ger4spp AT,XA,XB,XMSK,YMSK,PMSK)
+		[6]*argField{ap_MMAReg_38_40, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_ImmUnsigned_24_27, ap_ImmUnsigned_28_31, ap_ImmUnsigned_16_19}},
+	{PNOP, 0xfff3fffe, 0x7000000, 0xc0001, // Prefixed Nop MRR:*-form (pnop)
+		[6]*argField{}},
+	{PSTB, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Store Byte MLS:D-form (pstb RS,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTD, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store Doubleword 8LS:D-form (pstd RS,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTFD, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Store Floating-Point Double MLS:D-form (pstfd FRS,D(RA),R)
+		[6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTFS, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Store Floating-Point Single MLS:D-form (pstfs FRS,D(RA),R)
+		[6]*argField{ap_FPReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTH, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Store Halfword MLS:D-form (psth RS,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTQ, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store Quadword 8LS:D-form (pstq RSp,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTW, 0xff800000, 0x6000000, 0x6c0000, // Prefixed Store Word MLS:D-form (pstw RS,D(RA),R)
+		[6]*argField{ap_Reg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTXSD, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store VSX Scalar Doubleword 8LS:D-form (pstxsd VRS,D(RA),R)
+		[6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTXSSP, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store VSX Scalar Single-Precision 8LS:D-form (pstxssp VRS,D(RA),R)
+		[6]*argField{ap_VecReg_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTXV, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store VSX Vector 8LS:D-form (pstxv XS,D(RA),R)
+		[6]*argField{ap_VecSReg_37_37_38_42, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
+	{PSTXVP, 0xff800000, 0x4000000, 0x6c0000, // Prefixed Store VSX Vector Paired 8LS:D-form (pstxvp XSp,D(RA),R)
+		[6]*argField{ap_VecSpReg_42_42_38_41, ap_Offset_14_31_48_63, ap_Reg_43_47, ap_ImmUnsigned_11_11}},
 	{SETBC, 0xfc0007fe, 0x7c000300, 0xf801, // Set Boolean Condition X-form (setbc RT,BI)
-		[5]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
 	{SETBCR, 0xfc0007fe, 0x7c000340, 0xf801, // Set Boolean Condition Reverse X-form (setbcr RT,BI)
-		[5]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
 	{SETNBC, 0xfc0007fe, 0x7c000380, 0xf801, // Set Negative Boolean Condition X-form (setnbc RT,BI)
-		[5]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
 	{SETNBCR, 0xfc0007fe, 0x7c0003c0, 0xf801, // Set Negative Boolean Condition Reverse X-form (setnbcr RT,BI)
-		[5]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_CondRegBit_11_15}},
 	{STXVP, 0xfc00000f, 0x18000001, 0x0, // Store VSX Vector Paired DQ-form (stxvp XSp,DQ(RA))
-		[5]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}},
+		[6]*argField{ap_VecSpReg_10_10_6_9, ap_Offset_16_27_shift4, ap_Reg_11_15}},
 	{STXVPX, 0xfc0007fe, 0x7c00039a, 0x1, // Store VSX Vector Paired Indexed X-form (stxvpx XSp,RA,RB)
-		[5]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSpReg_10_10_6_9, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVRBX, 0xfc0007fe, 0x7c00011a, 0x0, // Store VSX Vector Rightmost Byte Indexed X-form (stxvrbx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVRDX, 0xfc0007fe, 0x7c0001da, 0x0, // Store VSX Vector Rightmost Doubleword Indexed X-form (stxvrdx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVRHX, 0xfc0007fe, 0x7c00015a, 0x0, // Store VSX Vector Rightmost Halfword Indexed X-form (stxvrhx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVRWX, 0xfc0007fe, 0x7c00019a, 0x0, // Store VSX Vector Rightmost Word Indexed X-form (stxvrwx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VCFUGED, 0xfc0007ff, 0x1000054d, 0x0, // Vector Centrifuge Doubleword VX-form (vcfuged VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCLRLB, 0xfc0007ff, 0x1000018d, 0x0, // Vector Clear Leftmost Bytes VX-form (vclrlb VRT,VRA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}},
 	{VCLRRB, 0xfc0007ff, 0x100001cd, 0x0, // Vector Clear Rightmost Bytes VX-form (vclrrb VRT,VRA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_Reg_16_20}},
 	{VCLZDM, 0xfc0007ff, 0x10000784, 0x0, // Vector Count Leading Zeros Doubleword under bit Mask VX-form (vclzdm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUQ, 0xfc0007ff, 0x100001c7, 0x0, // Vector Compare Equal Quadword VC-form (vcmpequq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUQCC, 0xfc0007ff, 0x100005c7, 0x0, // Vector Compare Equal Quadword VC-form (vcmpequq. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSQ, 0xfc0007ff, 0x10000387, 0x0, // Vector Compare Greater Than Signed Quadword VC-form (vcmpgtsq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSQCC, 0xfc0007ff, 0x10000787, 0x0, // Vector Compare Greater Than Signed Quadword VC-form (vcmpgtsq. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUQ, 0xfc0007ff, 0x10000287, 0x0, // Vector Compare Greater Than Unsigned Quadword VC-form (vcmpgtuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUQCC, 0xfc0007ff, 0x10000687, 0x0, // Vector Compare Greater Than Unsigned Quadword VC-form (vcmpgtuq. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPSQ, 0xfc0007ff, 0x10000141, 0x600000, // Vector Compare Signed Quadword VX-form (vcmpsq BF,VRA,VRB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPUQ, 0xfc0007ff, 0x10000101, 0x600000, // Vector Compare Unsigned Quadword VX-form (vcmpuq BF,VRA,VRB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCNTMBB, 0xfc1e07ff, 0x10180642, 0x0, // Vector Count Mask Bits Byte VX-form (vcntmbb RT,VRB,MP)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
 	{VCNTMBD, 0xfc1e07ff, 0x101e0642, 0x0, // Vector Count Mask Bits Doubleword VX-form (vcntmbd RT,VRB,MP)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
 	{VCNTMBH, 0xfc1e07ff, 0x101a0642, 0x0, // Vector Count Mask Bits Halfword VX-form (vcntmbh RT,VRB,MP)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
 	{VCNTMBW, 0xfc1e07ff, 0x101c0642, 0x0, // Vector Count Mask Bits Word VX-form (vcntmbw RT,VRB,MP)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_15_15}},
 	{VCTZDM, 0xfc0007ff, 0x100007c4, 0x0, // Vector Count Trailing Zeros Doubleword under bit Mask VX-form (vctzdm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVESD, 0xfc0007ff, 0x100003cb, 0x0, // Vector Divide Extended Signed Doubleword VX-form (vdivesd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVESQ, 0xfc0007ff, 0x1000030b, 0x0, // Vector Divide Extended Signed Quadword VX-form (vdivesq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVESW, 0xfc0007ff, 0x1000038b, 0x0, // Vector Divide Extended Signed Word VX-form (vdivesw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVEUD, 0xfc0007ff, 0x100002cb, 0x0, // Vector Divide Extended Unsigned Doubleword VX-form (vdiveud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVEUQ, 0xfc0007ff, 0x1000020b, 0x0, // Vector Divide Extended Unsigned Quadword VX-form (vdiveuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVEUW, 0xfc0007ff, 0x1000028b, 0x0, // Vector Divide Extended Unsigned Word VX-form (vdiveuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVSD, 0xfc0007ff, 0x100001cb, 0x0, // Vector Divide Signed Doubleword VX-form (vdivsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVSQ, 0xfc0007ff, 0x1000010b, 0x0, // Vector Divide Signed Quadword VX-form (vdivsq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVSW, 0xfc0007ff, 0x1000018b, 0x0, // Vector Divide Signed Word VX-form (vdivsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVUD, 0xfc0007ff, 0x100000cb, 0x0, // Vector Divide Unsigned Doubleword VX-form (vdivud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVUQ, 0xfc0007ff, 0x1000000b, 0x0, // Vector Divide Unsigned Quadword VX-form (vdivuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VDIVUW, 0xfc0007ff, 0x1000008b, 0x0, // Vector Divide Unsigned Word VX-form (vdivuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VEXPANDBM, 0xfc1f07ff, 0x10000642, 0x0, // Vector Expand Byte Mask VX-form (vexpandbm VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXPANDDM, 0xfc1f07ff, 0x10030642, 0x0, // Vector Expand Doubleword Mask VX-form (vexpanddm VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXPANDHM, 0xfc1f07ff, 0x10010642, 0x0, // Vector Expand Halfword Mask VX-form (vexpandhm VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXPANDQM, 0xfc1f07ff, 0x10040642, 0x0, // Vector Expand Quadword Mask VX-form (vexpandqm VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXPANDWM, 0xfc1f07ff, 0x10020642, 0x0, // Vector Expand Word Mask VX-form (vexpandwm VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTDDVLX, 0xfc00003f, 0x1000001e, 0x0, // Vector Extract Double Doubleword to VSR using GPR-specified Left-Index VA-form (vextddvlx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDDVRX, 0xfc00003f, 0x1000001f, 0x0, // Vector Extract Double Doubleword to VSR using GPR-specified Right-Index VA-form (vextddvrx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUBVLX, 0xfc00003f, 0x10000018, 0x0, // Vector Extract Double Unsigned Byte to VSR using GPR-specified Left-Index VA-form (vextdubvlx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUBVRX, 0xfc00003f, 0x10000019, 0x0, // Vector Extract Double Unsigned Byte to VSR using GPR-specified Right-Index VA-form (vextdubvrx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUHVLX, 0xfc00003f, 0x1000001a, 0x0, // Vector Extract Double Unsigned Halfword to VSR using GPR-specified Left-Index VA-form (vextduhvlx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUHVRX, 0xfc00003f, 0x1000001b, 0x0, // Vector Extract Double Unsigned Halfword to VSR using GPR-specified Right-Index VA-form (vextduhvrx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUWVLX, 0xfc00003f, 0x1000001c, 0x0, // Vector Extract Double Unsigned Word to VSR using GPR-specified Left-Index VA-form (vextduwvlx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTDUWVRX, 0xfc00003f, 0x1000001d, 0x0, // Vector Extract Double Unsigned Word to VSR using GPR-specified Right-Index VA-form (vextduwvrx VRT,VRA,VRB,RC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_Reg_21_25}},
 	{VEXTRACTBM, 0xfc1f07ff, 0x10080642, 0x0, // Vector Extract Byte Mask VX-form (vextractbm RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VEXTRACTDM, 0xfc1f07ff, 0x100b0642, 0x0, // Vector Extract Doubleword Mask VX-form (vextractdm RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VEXTRACTHM, 0xfc1f07ff, 0x10090642, 0x0, // Vector Extract Halfword Mask VX-form (vextracthm RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VEXTRACTQM, 0xfc1f07ff, 0x100c0642, 0x0, // Vector Extract Quadword Mask VX-form (vextractqm RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VEXTRACTWM, 0xfc1f07ff, 0x100a0642, 0x0, // Vector Extract Word Mask VX-form (vextractwm RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VEXTSD2Q, 0xfc1f07ff, 0x101b0602, 0x0, // Vector Extend Sign Doubleword to Quadword VX-form (vextsd2q VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VGNB, 0xfc0007ff, 0x100004cc, 0x180000, // Vector Gather every Nth Bit VX-form (vgnb RT,VRB,N)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_13_15}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_13_15}},
 	{VINSBLX, 0xfc0007ff, 0x1000020f, 0x0, // Vector Insert Byte from GPR using GPR-specified Left-Index VX-form (vinsblx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSBRX, 0xfc0007ff, 0x1000030f, 0x0, // Vector Insert Byte from GPR using GPR-specified Right-Index VX-form (vinsbrx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSBVLX, 0xfc0007ff, 0x1000000f, 0x0, // Vector Insert Byte from VSR using GPR-specified Left-Index VX-form (vinsbvlx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSBVRX, 0xfc0007ff, 0x1000010f, 0x0, // Vector Insert Byte from VSR using GPR-specified Right-Index VX-form (vinsbvrx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSD, 0xfc0007ff, 0x100001cf, 0x100000, // Vector Insert Doubleword from GPR using immediate-specified index VX-form (vinsd VRT,RB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}},
 	{VINSDLX, 0xfc0007ff, 0x100002cf, 0x0, // Vector Insert Doubleword from GPR using GPR-specified Left-Index VX-form (vinsdlx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSDRX, 0xfc0007ff, 0x100003cf, 0x0, // Vector Insert Doubleword from GPR using GPR-specified Right-Index VX-form (vinsdrx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSHLX, 0xfc0007ff, 0x1000024f, 0x0, // Vector Insert Halfword from GPR using GPR-specified Left-Index VX-form (vinshlx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSHRX, 0xfc0007ff, 0x1000034f, 0x0, // Vector Insert Halfword from GPR using GPR-specified Right-Index VX-form (vinshrx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSHVLX, 0xfc0007ff, 0x1000004f, 0x0, // Vector Insert Halfword from VSR using GPR-specified Left-Index VX-form (vinshvlx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSHVRX, 0xfc0007ff, 0x1000014f, 0x0, // Vector Insert Halfword from VSR using GPR-specified Right-Index VX-form (vinshvrx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSW, 0xfc0007ff, 0x100000cf, 0x100000, // Vector Insert Word from GPR using immediate-specified index VX-form (vinsw VRT,RB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_16_20, ap_ImmUnsigned_12_15}},
 	{VINSWLX, 0xfc0007ff, 0x1000028f, 0x0, // Vector Insert Word from GPR using GPR-specified Left-Index VX-form (vinswlx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSWRX, 0xfc0007ff, 0x1000038f, 0x0, // Vector Insert Word from GPR using GPR-specified Right-Index VX-form (vinswrx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VINSWVLX, 0xfc0007ff, 0x1000008f, 0x0, // Vector Insert Word from VSR using GPR-specified Left-Index VX-form (vinswvlx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSWVRX, 0xfc0007ff, 0x1000018f, 0x0, // Vector Insert Word from VSR using GPR-specified Left-Index VX-form (vinswvrx VRT,RA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VMODSD, 0xfc0007ff, 0x100007cb, 0x0, // Vector Modulo Signed Doubleword VX-form (vmodsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMODSQ, 0xfc0007ff, 0x1000070b, 0x0, // Vector Modulo Signed Quadword VX-form (vmodsq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMODSW, 0xfc0007ff, 0x1000078b, 0x0, // Vector Modulo Signed Word VX-form (vmodsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMODUD, 0xfc0007ff, 0x100006cb, 0x0, // Vector Modulo Unsigned Doubleword VX-form (vmodud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMODUQ, 0xfc0007ff, 0x1000060b, 0x0, // Vector Modulo Unsigned Quadword VX-form (vmoduq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMODUW, 0xfc0007ff, 0x1000068b, 0x0, // Vector Modulo Unsigned Word VX-form (vmoduw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMSUMCUD, 0xfc00003f, 0x10000017, 0x0, // Vector Multiply-Sum & write Carry-out Unsigned Doubleword VA-form (vmsumcud VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMULESD, 0xfc0007ff, 0x100003c8, 0x0, // Vector Multiply Even Signed Doubleword VX-form (vmulesd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULEUD, 0xfc0007ff, 0x100002c8, 0x0, // Vector Multiply Even Unsigned Doubleword VX-form (vmuleud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULHSD, 0xfc0007ff, 0x100003c9, 0x0, // Vector Multiply High Signed Doubleword VX-form (vmulhsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULHSW, 0xfc0007ff, 0x10000389, 0x0, // Vector Multiply High Signed Word VX-form (vmulhsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULHUD, 0xfc0007ff, 0x100002c9, 0x0, // Vector Multiply High Unsigned Doubleword VX-form (vmulhud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULHUW, 0xfc0007ff, 0x10000289, 0x0, // Vector Multiply High Unsigned Word VX-form (vmulhuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULLD, 0xfc0007ff, 0x100001c9, 0x0, // Vector Multiply Low Doubleword VX-form (vmulld VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOSD, 0xfc0007ff, 0x100001c8, 0x0, // Vector Multiply Odd Signed Doubleword VX-form (vmulosd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOUD, 0xfc0007ff, 0x100000c8, 0x0, // Vector Multiply Odd Unsigned Doubleword VX-form (vmuloud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPDEPD, 0xfc0007ff, 0x100005cd, 0x0, // Vector Parallel Bits Deposit Doubleword VX-form (vpdepd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPEXTD, 0xfc0007ff, 0x1000058d, 0x0, // Vector Parallel Bits Extract Doubleword VX-form (vpextd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLQ, 0xfc0007ff, 0x10000005, 0x0, // Vector Rotate Left Quadword VX-form (vrlq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLQMI, 0xfc0007ff, 0x10000045, 0x0, // Vector Rotate Left Quadword then Mask Insert VX-form (vrlqmi VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLQNM, 0xfc0007ff, 0x10000145, 0x0, // Vector Rotate Left Quadword then AND with Mask VX-form (vrlqnm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLDBI, 0xfc00063f, 0x10000016, 0x0, // Vector Shift Left Double by Bit Immediate VN-form (vsldbi VRT,VRA,VRB,SH)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}},
 	{VSLQ, 0xfc0007ff, 0x10000105, 0x0, // Vector Shift Left Quadword VX-form (vslq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRAQ, 0xfc0007ff, 0x10000305, 0x0, // Vector Shift Right Algebraic Quadword VX-form (vsraq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRDBI, 0xfc00063f, 0x10000216, 0x0, // Vector Shift Right Double by Bit Immediate VN-form (vsrdbi VRT,VRA,VRB,SH)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_23_25}},
 	{VSRQ, 0xfc0007ff, 0x10000205, 0x0, // Vector Shift Right Quadword VX-form (vsrq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSTRIBL, 0xfc1f07ff, 0x1000000d, 0x0, // Vector String Isolate Byte Left-justified VX-form (vstribl VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIBLCC, 0xfc1f07ff, 0x1000040d, 0x0, // Vector String Isolate Byte Left-justified VX-form (vstribl. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIBR, 0xfc1f07ff, 0x1001000d, 0x0, // Vector String Isolate Byte Right-justified VX-form (vstribr VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIBRCC, 0xfc1f07ff, 0x1001040d, 0x0, // Vector String Isolate Byte Right-justified VX-form (vstribr. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIHL, 0xfc1f07ff, 0x1002000d, 0x0, // Vector String Isolate Halfword Left-justified VX-form (vstrihl VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIHLCC, 0xfc1f07ff, 0x1002040d, 0x0, // Vector String Isolate Halfword Left-justified VX-form (vstrihl. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIHR, 0xfc1f07ff, 0x1003000d, 0x0, // Vector String Isolate Halfword Right-justified VX-form (vstrihr VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSTRIHRCC, 0xfc1f07ff, 0x1003040d, 0x0, // Vector String Isolate Halfword Right-justified VX-form (vstrihr. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCMPEQQP, 0xfc0007fe, 0xfc000088, 0x1, // VSX Scalar Compare Equal Quad-Precision X-form (xscmpeqqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCMPGEQP, 0xfc0007fe, 0xfc000188, 0x1, // VSX Scalar Compare Greater Than or Equal Quad-Precision X-form (xscmpgeqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCMPGTQP, 0xfc0007fe, 0xfc0001c8, 0x1, // VSX Scalar Compare Greater Than Quad-Precision X-form (xscmpgtqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCVQPSQZ, 0xfc1f07fe, 0xfc080688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Signed Quadword X-form (xscvqpsqz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPUQZ, 0xfc1f07fe, 0xfc000688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Quadword X-form (xscvqpuqz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVSQQP, 0xfc1f07fe, 0xfc0b0688, 0x1, // VSX Scalar Convert with round Signed Quadword to Quad-Precision X-form (xscvsqqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVUQQP, 0xfc1f07fe, 0xfc030688, 0x1, // VSX Scalar Convert with round Unsigned Quadword to Quad-Precision X-form (xscvuqqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSMAXCQP, 0xfc0007fe, 0xfc000548, 0x1, // VSX Scalar Maximum Type-C Quad-Precision X-form (xsmaxcqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMINCQP, 0xfc0007fe, 0xfc0005c8, 0x1, // VSX Scalar Minimum Type-C Quad-Precision X-form (xsmincqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XVBF16GER2, 0xfc0007f8, 0xec000198, 0x600001, // VSX Vector bfloat16 GER (Rank-2 Update) XX3-form (xvbf16ger2 AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVBF16GER2NN, 0xfc0007f8, 0xec000790, 0x600001, // VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Negative accumulate XX3-form (xvbf16ger2nn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVBF16GER2NP, 0xfc0007f8, 0xec000390, 0x600001, // VSX Vector bfloat16 GER (Rank-2 Update) Negative multiply, Positive accumulate XX3-form (xvbf16ger2np AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVBF16GER2PN, 0xfc0007f8, 0xec000590, 0x600001, // VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Negative accumulate XX3-form (xvbf16ger2pn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVBF16GER2PP, 0xfc0007f8, 0xec000190, 0x600001, // VSX Vector bfloat16 GER (Rank-2 Update) Positive multiply, Positive accumulate XX3-form (xvbf16ger2pp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCVBF16SPN, 0xfc1f07fc, 0xf010076c, 0x0, // VSX Vector Convert bfloat16 to Single-Precision format XX2-form (xvcvbf16spn XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPBF16, 0xfc1f07fc, 0xf011076c, 0x0, // VSX Vector Convert with round Single-Precision to bfloat16 format XX2-form (xvcvspbf16 XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVF16GER2, 0xfc0007f8, 0xec000098, 0x600001, // VSX Vector 16-bit Floating-Point GER (rank-2 update) XX3-form (xvf16ger2 AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF16GER2NN, 0xfc0007f8, 0xec000690, 0x600001, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Negative accumulate XX3-form (xvf16ger2nn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF16GER2NP, 0xfc0007f8, 0xec000290, 0x600001, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Negative multiply, Positive accumulate XX3-form (xvf16ger2np AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF16GER2PN, 0xfc0007f8, 0xec000490, 0x600001, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Negative accumulate XX3-form (xvf16ger2pn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF16GER2PP, 0xfc0007f8, 0xec000090, 0x600001, // VSX Vector 16-bit Floating-Point GER (rank-2 update) Positive multiply, Positive accumulate XX3-form (xvf16ger2pp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF32GER, 0xfc0007f8, 0xec0000d8, 0x600001, // VSX Vector 32-bit Floating-Point GER (rank-1 update) XX3-form (xvf32ger AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF32GERNN, 0xfc0007f8, 0xec0006d0, 0x600001, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate XX3-form (xvf32gernn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF32GERNP, 0xfc0007f8, 0xec0002d0, 0x600001, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate XX3-form (xvf32gernp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF32GERPN, 0xfc0007f8, 0xec0004d0, 0x600001, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate XX3-form (xvf32gerpn AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF32GERPP, 0xfc0007f8, 0xec0000d0, 0x600001, // VSX Vector 32-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate XX3-form (xvf32gerpp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF64GER, 0xfc0007f8, 0xec0001d8, 0x600001, // VSX Vector 64-bit Floating-Point GER (rank-1 update) XX3-form (xvf64ger AT,XAp,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF64GERNN, 0xfc0007f8, 0xec0007d0, 0x600001, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Negative accumulate XX3-form (xvf64gernn AT,XAp,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF64GERNP, 0xfc0007f8, 0xec0003d0, 0x600001, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Negative multiply, Positive accumulate XX3-form (xvf64gernp AT,XAp,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF64GERPN, 0xfc0007f8, 0xec0005d0, 0x600001, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Negative accumulate XX3-form (xvf64gerpn AT,XAp,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVF64GERPP, 0xfc0007f8, 0xec0001d0, 0x600001, // VSX Vector 64-bit Floating-Point GER (rank-1 update) Positive multiply, Positive accumulate XX3-form (xvf64gerpp AT,XAp,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI16GER2, 0xfc0007f8, 0xec000258, 0x600001, // VSX Vector 16-bit Signed Integer GER (rank-2 update) XX3-form (xvi16ger2 AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI16GER2PP, 0xfc0007f8, 0xec000358, 0x600001, // VSX Vector 16-bit Signed Integer GER (rank-2 update) Positive multiply, Positive accumulate XX3-form (xvi16ger2pp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI16GER2S, 0xfc0007f8, 0xec000158, 0x600001, // VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation XX3-form (xvi16ger2s AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI16GER2SPP, 0xfc0007f8, 0xec000150, 0x600001, // VSX Vector 16-bit Signed Integer GER (rank-2 update) with Saturation Positive multiply, Positive accumulate XX3-form (xvi16ger2spp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI4GER8, 0xfc0007f8, 0xec000118, 0x600001, // VSX Vector 4-bit Signed Integer GER (rank-8 update) XX3-form (xvi4ger8 AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI4GER8PP, 0xfc0007f8, 0xec000110, 0x600001, // VSX Vector 4-bit Signed Integer GER (rank-8 update) Positive multiply, Positive accumulate XX3-form (xvi4ger8pp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI8GER4, 0xfc0007f8, 0xec000018, 0x600001, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) XX3-form (xvi8ger4 AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI8GER4PP, 0xfc0007f8, 0xec000010, 0x600001, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) Positive multiply, Positive accumulate XX3-form (xvi8ger4pp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVI8GER4SPP, 0xfc0007f8, 0xec000318, 0x600001, // VSX Vector 8-bit Signed/Unsigned Integer GER (rank-4 update) with Saturate Positive multiply, Positive accumulate XX3-form (xvi8ger4spp AT,XA,XB)
-		[5]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_MMAReg_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVTLSBB, 0xfc1f07fc, 0xf002076c, 0x600001, // VSX Vector Test Least-Significant Bit by Byte XX2-form (xvtlsbb BF,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
+	{XXBLENDVB, 0xfff00000, 0x5000000, 0xfffff, // VSX Vector Blend Variable Byte 8RR:XX4-form (xxblendvb XT,XA,XB,XC)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}},
+	{XXBLENDVD, 0xfff00000, 0x5000000, 0xfffff, // VSX Vector Blend Variable Doubleword 8RR:XX4-form (xxblendvd XT,XA,XB,XC)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}},
+	{XXBLENDVH, 0xfff00000, 0x5000000, 0xfffff, // VSX Vector Blend Variable Halfword 8RR:XX4-form (xxblendvh XT,XA,XB,XC)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}},
+	{XXBLENDVW, 0xfff00000, 0x5000000, 0xfffff, // VSX Vector Blend Variable Word 8RR:XX4-form (xxblendvw XT,XA,XB,XC)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57}},
+	{XXEVAL, 0xfff00000, 0x5000000, 0xfff00, // VSX Vector Evaluate 8RR-XX4-form (xxeval XT,XA,XB,XC,IMM)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57, ap_ImmUnsigned_24_31}},
 	{XXGENPCVBM, 0xfc0007fe, 0xf0000728, 0x0, // VSX Vector Generate PCV from Byte Mask X-form (xxgenpcvbm XT,VRB,IMM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{XXGENPCVDM, 0xfc0007fe, 0xf000076a, 0x0, // VSX Vector Generate PCV from Doubleword Mask X-form (xxgenpcvdm XT,VRB,IMM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{XXGENPCVHM, 0xfc0007fe, 0xf000072a, 0x0, // VSX Vector Generate PCV from Halfword Mask X-form (xxgenpcvhm XT,VRB,IMM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{XXGENPCVWM, 0xfc0007fe, 0xf0000768, 0x0, // VSX Vector Generate PCV from Word Mask X-form (xxgenpcvwm XT,VRB,IMM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{XXMFACC, 0xfc1f07fe, 0x7c000162, 0x60f801, // VSX Move From Accumulator X-form (xxmfacc AS)
-		[5]*argField{ap_MMAReg_6_8}},
+		[6]*argField{ap_MMAReg_6_8}},
 	{XXMTACC, 0xfc1f07fe, 0x7c010162, 0x60f801, // VSX Move To Accumulator X-form (xxmtacc AT)
-		[5]*argField{ap_MMAReg_6_8}},
+		[6]*argField{ap_MMAReg_6_8}},
+	{XXPERMX, 0xfff00000, 0x5000000, 0xffff8, // VSX Vector Permute Extended 8RR:XX4-form (xxpermx XT,XA,XB,XC,UIM)
+		[6]*argField{ap_VecSReg_63_63_38_42, ap_VecSReg_61_61_43_47, ap_VecSReg_62_62_48_52, ap_VecSReg_60_60_53_57, ap_ImmUnsigned_29_31}},
 	{XXSETACCZ, 0xfc1f07fe, 0x7c030162, 0x60f801, // VSX Set Accumulator to Zero X-form (xxsetaccz AT)
-		[5]*argField{ap_MMAReg_6_8}},
+		[6]*argField{ap_MMAReg_6_8}},
+	{XXSPLTI32DX, 0xfff00000, 0x5000000, 0xf0000, // VSX Vector Splat Immediate32 Doubleword Indexed 8RR:D-form (xxsplti32dx XT,IX,IMM32)
+		[6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_46_46, ap_ImmUnsigned_16_31_48_63}},
+	{XXSPLTIDP, 0xfff00000, 0x5000000, 0xf0000, // VSX Vector Splat Immediate Double-Precision 8RR:D-form (xxspltidp XT,IMM32)
+		[6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_16_31_48_63}},
+	{XXSPLTIW, 0xfff00000, 0x5000000, 0xf0000, // VSX Vector Splat Immediate Word 8RR:D-form (xxspltiw XT,IMM32)
+		[6]*argField{ap_VecSReg_47_47_38_42, ap_ImmUnsigned_16_31_48_63}},
 	{MSGCLRU, 0xfc0007fe, 0x7c0000dc, 0x3ff0001, // Ultravisor Message Clear X-form (msgclru RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{MSGSNDU, 0xfc0007fe, 0x7c00009c, 0x3ff0001, // Ultravisor Message SendX-form (msgsndu RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{URFID, 0xfc0007fe, 0x4c000264, 0x3fff801, // Ultravisor Return From Interrupt Doubleword XL-form (urfid)
-		[5]*argField{}},
+		[6]*argField{}},
 	{ADDEX, 0xfc0001fe, 0x7c000154, 0x1, // Add Extended using alternate carry bit Z23-form (addex RT,RA,RB,CY)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_21_22}},
 	{MFFSCDRN, 0xfc1f07fe, 0xfc14048e, 0x1, // Move From FPSCR Control & Set DRN X-form (mffscdrn FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{MFFSCDRNI, 0xfc1f07fe, 0xfc15048e, 0xc001, // Move From FPSCR Control & Set DRN Immediate X-form (mffscdrni FRT,DRM)
-		[5]*argField{ap_FPReg_6_10, ap_ImmUnsigned_18_20}},
+		[6]*argField{ap_FPReg_6_10, ap_ImmUnsigned_18_20}},
 	{MFFSCE, 0xfc1f07fe, 0xfc01048e, 0xf801, // Move From FPSCR & Clear Enables X-form (mffsce FRT)
-		[5]*argField{ap_FPReg_6_10}},
+		[6]*argField{ap_FPReg_6_10}},
 	{MFFSCRN, 0xfc1f07fe, 0xfc16048e, 0x1, // Move From FPSCR Control & Set RN X-form (mffscrn FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{MFFSCRNI, 0xfc1f07fe, 0xfc17048e, 0xe001, // Move From FPSCR Control & Set RN Immediate X-form (mffscrni FRT,RM)
-		[5]*argField{ap_FPReg_6_10, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_FPReg_6_10, ap_ImmUnsigned_19_20}},
 	{MFFSL, 0xfc1f07fe, 0xfc18048e, 0xf801, // Move From FPSCR Lightweight X-form (mffsl FRT)
-		[5]*argField{ap_FPReg_6_10}},
+		[6]*argField{ap_FPReg_6_10}},
 	{SLBIAG, 0xfc0007fe, 0x7c0006a4, 0x1ef801, // SLB Invalidate All Global X-form (slbiag RS, L)
-		[5]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
 	{VMSUMUDM, 0xfc00003f, 0x10000023, 0x0, // Vector Multiply-Sum Unsigned Doubleword Modulo VA-form (vmsumudm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{ADDPCIS, 0xfc00003e, 0x4c000004, 0x0, // Add PC Immediate Shifted DX-form (addpcis RT,D)
-		[5]*argField{ap_Reg_6_10, ap_ImmSigned_16_25_11_15_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_ImmSigned_16_25_11_15_31_31}},
 	{BCDCFNCC, 0xfc1f05ff, 0x10070581, 0x0, // Decimal Convert From National VX-form (bcdcfn. VRT,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDCFSQCC, 0xfc1f05ff, 0x10020581, 0x0, // Decimal Convert From Signed Quadword VX-form (bcdcfsq. VRT,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDCFZCC, 0xfc1f05ff, 0x10060581, 0x0, // Decimal Convert From Zoned VX-form (bcdcfz. VRT,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDCPSGNCC, 0xfc0007ff, 0x10000341, 0x0, // Decimal Copy Sign VX-form (bcdcpsgn. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{BCDCTNCC, 0xfc1f05ff, 0x10050581, 0x200, // Decimal Convert To National VX-form (bcdctn. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{BCDCTSQCC, 0xfc1f05ff, 0x10000581, 0x200, // Decimal Convert To Signed Quadword VX-form (bcdctsq. VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{BCDCTZCC, 0xfc1f05ff, 0x10040581, 0x0, // Decimal Convert To Zoned VX-form (bcdctz. VRT,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDSCC, 0xfc0005ff, 0x100004c1, 0x0, // Decimal Shift VX-form (bcds. VRT,VRA,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDSETSGNCC, 0xfc1f05ff, 0x101f0581, 0x0, // Decimal Set Sign VX-form (bcdsetsgn. VRT,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDSRCC, 0xfc0005ff, 0x100005c1, 0x0, // Decimal Shift and Round VX-form (bcdsr. VRT,VRA,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDTRUNCCC, 0xfc0005ff, 0x10000501, 0x0, // Decimal Truncate VX-form (bcdtrunc. VRT,VRA,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDUSCC, 0xfc0005ff, 0x10000481, 0x200, // Decimal Unsigned Shift VX-form (bcdus. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{BCDUTRUNCCC, 0xfc0005ff, 0x10000541, 0x200, // Decimal Unsigned Truncate VX-form (bcdutrunc. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{CMPEQB, 0xfc0007fe, 0x7c0001c0, 0x600001, // Compare Equal Byte X-form (cmpeqb BF,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPRB, 0xfc0007fe, 0x7c000180, 0x400001, // Compare Ranged Byte X-form (cmprb BF,L,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{CNTTZD, 0xfc0007ff, 0x7c000474, 0xf800, // Count Trailing Zeros Doubleword X-form (cnttzd RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CNTTZDCC, 0xfc0007ff, 0x7c000475, 0xf800, // Count Trailing Zeros Doubleword X-form (cnttzd. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CNTTZW, 0xfc0007ff, 0x7c000434, 0xf800, // Count Trailing Zeros Word X-form (cnttzw RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CNTTZWCC, 0xfc0007ff, 0x7c000435, 0xf800, // Count Trailing Zeros Word X-form (cnttzw. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{COPY, 0xfc2007fe, 0x7c20060c, 0x3c00001, // Copy X-form (copy RA,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20}},
 	{CPABORT, 0xfc0007fe, 0x7c00068c, 0x3fff801, // Copy-Paste Abort X-form (cpabort)
-		[5]*argField{}},
+		[6]*argField{}},
 	{DARN, 0xfc0007fe, 0x7c0005e6, 0x1cf801, // Deliver A Random Number X-form (darn RT,L)
-		[5]*argField{ap_Reg_6_10, ap_ImmUnsigned_14_15}},
+		[6]*argField{ap_Reg_6_10, ap_ImmUnsigned_14_15}},
 	{DTSTSFI, 0xfc0007fe, 0xec000546, 0x400001, // DFP Test Significance Immediate X-form (dtstsfi BF,UIM,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}},
 	{DTSTSFIQ, 0xfc0007fe, 0xfc000546, 0x400001, // DFP Test Significance Immediate Quad X-form (dtstsfiq BF,UIM,FRBp)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_15, ap_FPReg_16_20}},
 	{EXTSWSLI, 0xfc0007fd, 0x7c0006f4, 0x0, // Extend Sign Word and Shift Left Immediate XS-form (extswsli RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
 	{EXTSWSLICC, 0xfc0007fd, 0x7c0006f5, 0x0, // Extend Sign Word and Shift Left Immediate XS-form (extswsli. RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
 	{LDAT, 0xfc0007fe, 0x7c0004cc, 0x1, // Load Doubleword ATomic X-form (ldat RT,RA,FC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{LWAT, 0xfc0007fe, 0x7c00048c, 0x1, // Load Word ATomic X-form (lwat RT,RA,FC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{LXSD, 0xfc000003, 0xe4000002, 0x0, // Load VSX Scalar Doubleword DS-form (lxsd VRT,DS(RA))
-		[5]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LXSIBZX, 0xfc0007fe, 0x7c00061a, 0x0, // Load VSX Scalar as Integer Byte & Zero Indexed X-form (lxsibzx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXSIHZX, 0xfc0007fe, 0x7c00065a, 0x0, // Load VSX Scalar as Integer Halfword & Zero Indexed X-form (lxsihzx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXSSP, 0xfc000003, 0xe4000003, 0x0, // Load VSX Scalar Single-Precision DS-form (lxssp VRT,DS(RA))
-		[5]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LXV, 0xfc000007, 0xf4000001, 0x0, // Load VSX Vector DQ-form (lxv XT,DQ(RA))
-		[5]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
 	{LXVB16X, 0xfc0007fe, 0x7c0006d8, 0x0, // Load VSX Vector Byte*16 Indexed X-form (lxvb16x XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVH8X, 0xfc0007fe, 0x7c000658, 0x0, // Load VSX Vector Halfword*8 Indexed X-form (lxvh8x XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVL, 0xfc0007fe, 0x7c00021a, 0x0, // Load VSX Vector with Length X-form (lxvl XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVLL, 0xfc0007fe, 0x7c00025a, 0x0, // Load VSX Vector with Length Left-justified X-form (lxvll XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVWSX, 0xfc0007fe, 0x7c0002d8, 0x0, // Load VSX Vector Word & Splat Indexed X-form (lxvwsx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVX, 0xfc0007be, 0x7c000218, 0x40, // Load VSX Vector Indexed X-form (lxvx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MADDHD, 0xfc00003f, 0x10000030, 0x0, // Multiply-Add High Doubleword VA-form (maddhd RT,RA,RB,RC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
 	{MADDHDU, 0xfc00003f, 0x10000031, 0x0, // Multiply-Add High Doubleword Unsigned VA-form (maddhdu RT,RA,RB,RC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
 	{MADDLD, 0xfc00003f, 0x10000033, 0x0, // Multiply-Add Low Doubleword VA-form (maddld RT,RA,RB,RC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_Reg_21_25}},
 	{MCRXRX, 0xfc0007fe, 0x7c000480, 0x7ff801, // Move to CR from XER Extended X-form (mcrxrx BF)
-		[5]*argField{ap_CondRegField_6_8}},
+		[6]*argField{ap_CondRegField_6_8}},
 	{MFVSRLD, 0xfc0007fe, 0x7c000266, 0xf800, // Move From VSR Lower Doubleword X-form (mfvsrld RA,XS)
-		[5]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
 	{MODSD, 0xfc0007fe, 0x7c000612, 0x1, // Modulo Signed Doubleword X-form (modsd RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MODSW, 0xfc0007fe, 0x7c000616, 0x1, // Modulo Signed Word X-form (modsw RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MODUD, 0xfc0007fe, 0x7c000212, 0x1, // Modulo Unsigned Doubleword X-form (modud RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MODUW, 0xfc0007fe, 0x7c000216, 0x1, // Modulo Unsigned Word X-form (moduw RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MSGSYNC, 0xfc0007fe, 0x7c0006ec, 0x3fff801, // Message Synchronize X-form (msgsync)
-		[5]*argField{}},
+		[6]*argField{}},
 	{MTVSRDD, 0xfc0007fe, 0x7c000366, 0x0, // Move To VSR Double Doubleword X-form (mtvsrdd XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MTVSRWS, 0xfc0007fe, 0x7c000326, 0xf800, // Move To VSR Word & Splat X-form (mtvsrws XT,RA)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
 	{PASTECC, 0xfc0007ff, 0x7c00070d, 0x3c00000, // Paste X-form (paste. RA,RB,L)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_10_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_10_10}},
 	{SETB, 0xfc0007fe, 0x7c000100, 0x3f801, // Set Boolean X-form (setb RT,BFA)
-		[5]*argField{ap_Reg_6_10, ap_CondRegField_11_13}},
+		[6]*argField{ap_Reg_6_10, ap_CondRegField_11_13}},
 	{SLBIEG, 0xfc0007fe, 0x7c0003a4, 0x1f0001, // SLB Invalidate Entry Global X-form (slbieg RS,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_16_20}},
 	{SLBSYNC, 0xfc0007fe, 0x7c0002a4, 0x3fff801, // SLB Synchronize X-form (slbsync)
-		[5]*argField{}},
+		[6]*argField{}},
 	{STDAT, 0xfc0007fe, 0x7c0005cc, 0x1, // Store Doubleword ATomic X-form (stdat RS,RA,FC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{STOP, 0xfc0007fe, 0x4c0002e4, 0x3fff801, // Stop XL-form (stop)
-		[5]*argField{}},
+		[6]*argField{}},
 	{STWAT, 0xfc0007fe, 0x7c00058c, 0x1, // Store Word ATomic X-form (stwat RS,RA,FC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{STXSD, 0xfc000003, 0xf4000002, 0x0, // Store VSX Scalar Doubleword DS-form (stxsd VRS,DS(RA))
-		[5]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{STXSIBX, 0xfc0007fe, 0x7c00071a, 0x0, // Store VSX Scalar as Integer Byte Indexed X-form (stxsibx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXSIHX, 0xfc0007fe, 0x7c00075a, 0x0, // Store VSX Scalar as Integer Halfword Indexed X-form (stxsihx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXSSP, 0xfc000003, 0xf4000003, 0x0, // Store VSX Scalar Single DS-form (stxssp VRS,DS(RA))
-		[5]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{STXV, 0xfc000007, 0xf4000005, 0x0, // Store VSX Vector DQ-form (stxv XS,DQ(RA))
-		[5]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_28_28_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
 	{STXVB16X, 0xfc0007fe, 0x7c0007d8, 0x0, // Store VSX Vector Byte*16 Indexed X-form (stxvb16x XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVH8X, 0xfc0007fe, 0x7c000758, 0x0, // Store VSX Vector Halfword*8 Indexed X-form (stxvh8x XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVL, 0xfc0007fe, 0x7c00031a, 0x0, // Store VSX Vector with Length X-form (stxvl XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVLL, 0xfc0007fe, 0x7c00035a, 0x0, // Store VSX Vector with Length Left-justified X-form (stxvll XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVX, 0xfc0007fe, 0x7c000318, 0x0, // Store VSX Vector Indexed X-form (stxvx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VABSDUB, 0xfc0007ff, 0x10000403, 0x0, // Vector Absolute Difference Unsigned Byte VX-form (vabsdub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VABSDUH, 0xfc0007ff, 0x10000443, 0x0, // Vector Absolute Difference Unsigned Halfword VX-form (vabsduh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VABSDUW, 0xfc0007ff, 0x10000483, 0x0, // Vector Absolute Difference Unsigned Word VX-form (vabsduw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VBPERMD, 0xfc0007ff, 0x100005cc, 0x0, // Vector Bit Permute Doubleword VX-form (vbpermd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCLZLSBB, 0xfc1f07ff, 0x10000602, 0x0, // Vector Count Leading Zero Least-Significant Bits Byte VX-form (vclzlsbb RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VCMPNEB, 0xfc0007ff, 0x10000007, 0x0, // Vector Compare Not Equal Byte VC-form (vcmpneb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEBCC, 0xfc0007ff, 0x10000407, 0x0, // Vector Compare Not Equal Byte VC-form (vcmpneb. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEH, 0xfc0007ff, 0x10000047, 0x0, // Vector Compare Not Equal Halfword VC-form (vcmpneh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEHCC, 0xfc0007ff, 0x10000447, 0x0, // Vector Compare Not Equal Halfword VC-form (vcmpneh. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEW, 0xfc0007ff, 0x10000087, 0x0, // Vector Compare Not Equal Word VC-form (vcmpnew VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEWCC, 0xfc0007ff, 0x10000487, 0x0, // Vector Compare Not Equal Word VC-form (vcmpnew. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZB, 0xfc0007ff, 0x10000107, 0x0, // Vector Compare Not Equal or Zero Byte VC-form (vcmpnezb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZBCC, 0xfc0007ff, 0x10000507, 0x0, // Vector Compare Not Equal or Zero Byte VC-form (vcmpnezb. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZH, 0xfc0007ff, 0x10000147, 0x0, // Vector Compare Not Equal or Zero Halfword VC-form (vcmpnezh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZHCC, 0xfc0007ff, 0x10000547, 0x0, // Vector Compare Not Equal or Zero Halfword VC-form (vcmpnezh. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZW, 0xfc0007ff, 0x10000187, 0x0, // Vector Compare Not Equal or Zero Word VC-form (vcmpnezw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPNEZWCC, 0xfc0007ff, 0x10000587, 0x0, // Vector Compare Not Equal or Zero Word VC-form (vcmpnezw. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCTZB, 0xfc1f07ff, 0x101c0602, 0x0, // Vector Count Trailing Zeros Byte VX-form (vctzb VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCTZD, 0xfc1f07ff, 0x101f0602, 0x0, // Vector Count Trailing Zeros Doubleword VX-form (vctzd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCTZH, 0xfc1f07ff, 0x101d0602, 0x0, // Vector Count Trailing Zeros Halfword VX-form (vctzh VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCTZLSBB, 0xfc1f07ff, 0x10010602, 0x0, // Vector Count Trailing Zero Least-Significant Bits Byte VX-form (vctzlsbb RT,VRB)
-		[5]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecReg_16_20}},
 	{VCTZW, 0xfc1f07ff, 0x101e0602, 0x0, // Vector Count Trailing Zeros Word VX-form (vctzw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTRACTD, 0xfc0007ff, 0x100002cd, 0x100000, // Vector Extract Doubleword to VSR using immediate-specified index VX-form (vextractd VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VEXTRACTUB, 0xfc0007ff, 0x1000020d, 0x100000, // Vector Extract Unsigned Byte to VSR using immediate-specified index VX-form (vextractub VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VEXTRACTUH, 0xfc0007ff, 0x1000024d, 0x100000, // Vector Extract Unsigned Halfword to VSR using immediate-specified index VX-form (vextractuh VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VEXTRACTUW, 0xfc0007ff, 0x1000028d, 0x100000, // Vector Extract Unsigned Word to VSR using immediate-specified index VX-form (vextractuw VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VEXTSB2D, 0xfc1f07ff, 0x10180602, 0x0, // Vector Extend Sign Byte To Doubleword VX-form (vextsb2d VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTSB2W, 0xfc1f07ff, 0x10100602, 0x0, // Vector Extend Sign Byte To Word VX-form (vextsb2w VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTSH2D, 0xfc1f07ff, 0x10190602, 0x0, // Vector Extend Sign Halfword To Doubleword VX-form (vextsh2d VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTSH2W, 0xfc1f07ff, 0x10110602, 0x0, // Vector Extend Sign Halfword To Word VX-form (vextsh2w VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTSW2D, 0xfc1f07ff, 0x101a0602, 0x0, // Vector Extend Sign Word To Doubleword VX-form (vextsw2d VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VEXTUBLX, 0xfc0007ff, 0x1000060d, 0x0, // Vector Extract Unsigned Byte to GPR using GPR-specified Left-Index VX-form (vextublx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VEXTUBRX, 0xfc0007ff, 0x1000070d, 0x0, // Vector Extract Unsigned Byte to GPR using GPR-specified Right-Index VX-form (vextubrx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VEXTUHLX, 0xfc0007ff, 0x1000064d, 0x0, // Vector Extract Unsigned Halfword to GPR using GPR-specified Left-Index VX-form (vextuhlx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VEXTUHRX, 0xfc0007ff, 0x1000074d, 0x0, // Vector Extract Unsigned Halfword to GPR using GPR-specified Right-Index VX-form (vextuhrx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VEXTUWLX, 0xfc0007ff, 0x1000068d, 0x0, // Vector Extract Unsigned Word to GPR using GPR-specified Left-Index VX-form (vextuwlx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VEXTUWRX, 0xfc0007ff, 0x1000078d, 0x0, // Vector Extract Unsigned Word to GPR using GPR-specified Right-Index VX-form (vextuwrx RT,RA,VRB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_VecReg_16_20}},
 	{VINSERTB, 0xfc0007ff, 0x1000030d, 0x100000, // Vector Insert Byte from VSR using immediate-specified index VX-form (vinsertb VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VINSERTD, 0xfc0007ff, 0x100003cd, 0x100000, // Vector Insert Doubleword from VSR using immediate-specified index VX-form (vinsertd VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VINSERTH, 0xfc0007ff, 0x1000034d, 0x100000, // Vector Insert Halfword from VSR using immediate-specified index VX-form (vinserth VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VINSERTW, 0xfc0007ff, 0x1000038d, 0x100000, // Vector Insert Word from VSR using immediate-specified index VX-form (vinsertw VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VMUL10CUQ, 0xfc0007ff, 0x10000001, 0xf800, // Vector Multiply-by-10 & write Carry-out Unsigned Quadword VX-form (vmul10cuq VRT,VRA)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
 	{VMUL10ECUQ, 0xfc0007ff, 0x10000041, 0x0, // Vector Multiply-by-10 Extended & write Carry-out Unsigned Quadword VX-form (vmul10ecuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMUL10EUQ, 0xfc0007ff, 0x10000241, 0x0, // Vector Multiply-by-10 Extended Unsigned Quadword VX-form (vmul10euq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMUL10UQ, 0xfc0007ff, 0x10000201, 0xf800, // Vector Multiply-by-10 Unsigned Quadword VX-form (vmul10uq VRT,VRA)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
 	{VNEGD, 0xfc1f07ff, 0x10070602, 0x0, // Vector Negate Doubleword VX-form (vnegd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VNEGW, 0xfc1f07ff, 0x10060602, 0x0, // Vector Negate Word VX-form (vnegw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPERMR, 0xfc00003f, 0x1000003b, 0x0, // Vector Permute Right-indexed VA-form (vpermr VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VPRTYBD, 0xfc1f07ff, 0x10090602, 0x0, // Vector Parity Byte Doubleword VX-form (vprtybd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPRTYBQ, 0xfc1f07ff, 0x100a0602, 0x0, // Vector Parity Byte Quadword VX-form (vprtybq VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPRTYBW, 0xfc1f07ff, 0x10080602, 0x0, // Vector Parity Byte Word VX-form (vprtybw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRLDMI, 0xfc0007ff, 0x100000c5, 0x0, // Vector Rotate Left Doubleword then Mask Insert VX-form (vrldmi VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLDNM, 0xfc0007ff, 0x100001c5, 0x0, // Vector Rotate Left Doubleword then AND with Mask VX-form (vrldnm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLWMI, 0xfc0007ff, 0x10000085, 0x0, // Vector Rotate Left Word then Mask Insert VX-form (vrlwmi VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLWNM, 0xfc0007ff, 0x10000185, 0x0, // Vector Rotate Left Word then AND with Mask VX-form (vrlwnm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLV, 0xfc0007ff, 0x10000744, 0x0, // Vector Shift Left Variable VX-form (vslv VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRV, 0xfc0007ff, 0x10000704, 0x0, // Vector Shift Right Variable VX-form (vsrv VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{WAIT, 0xfc0007fe, 0x7c00003c, 0x9cf801, // Wait X-form (wait WC,PL)
-		[5]*argField{ap_ImmUnsigned_9_10, ap_ImmUnsigned_14_15}},
+		[6]*argField{ap_ImmUnsigned_9_10, ap_ImmUnsigned_14_15}},
 	{XSABSQP, 0xfc1f07fe, 0xfc000648, 0x1, // VSX Scalar Absolute Quad-Precision X-form (xsabsqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSADDQP, 0xfc0007ff, 0xfc000008, 0x0, // VSX Scalar Add Quad-Precision [using round to Odd] X-form (xsaddqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSADDQPO, 0xfc0007ff, 0xfc000009, 0x0, // VSX Scalar Add Quad-Precision [using round to Odd] X-form (xsaddqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCMPEQDP, 0xfc0007f8, 0xf0000018, 0x0, // VSX Scalar Compare Equal Double-Precision XX3-form (xscmpeqdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPEXPDP, 0xfc0007f8, 0xf00001d8, 0x600001, // VSX Scalar Compare Exponents Double-Precision XX3-form (xscmpexpdp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPEXPQP, 0xfc0007fe, 0xfc000148, 0x600001, // VSX Scalar Compare Exponents Quad-Precision X-form (xscmpexpqp BF,VRA,VRB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCMPGEDP, 0xfc0007f8, 0xf0000098, 0x0, // VSX Scalar Compare Greater Than or Equal Double-Precision XX3-form (xscmpgedp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPGTDP, 0xfc0007f8, 0xf0000058, 0x0, // VSX Scalar Compare Greater Than Double-Precision XX3-form (xscmpgtdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPOQP, 0xfc0007fe, 0xfc000108, 0x600001, // VSX Scalar Compare Ordered Quad-Precision X-form (xscmpoqp BF,VRA,VRB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCMPUQP, 0xfc0007fe, 0xfc000508, 0x600001, // VSX Scalar Compare Unordered Quad-Precision X-form (xscmpuqp BF,VRA,VRB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCPSGNQP, 0xfc0007fe, 0xfc0000c8, 0x1, // VSX Scalar Copy Sign Quad-Precision X-form (xscpsgnqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSCVDPHP, 0xfc1f07fc, 0xf011056c, 0x0, // VSX Scalar Convert with round Double-Precision to Half-Precision format XX2-form (xscvdphp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVDPQP, 0xfc1f07fe, 0xfc160688, 0x1, // VSX Scalar Convert Double-Precision to Quad-Precision format X-form (xscvdpqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVHPDP, 0xfc1f07fc, 0xf010056c, 0x0, // VSX Scalar Convert Half-Precision to Double-Precision format XX2-form (xscvhpdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVQPDP, 0xfc1f07ff, 0xfc140688, 0x0, // VSX Scalar Convert with round Quad-Precision to Double-Precision format [using round to Odd] X-form (xscvqpdp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPDPO, 0xfc1f07ff, 0xfc140689, 0x0, // VSX Scalar Convert with round Quad-Precision to Double-Precision format [using round to Odd] X-form (xscvqpdpo VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPSDZ, 0xfc1f07fe, 0xfc190688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Signed Doubleword format X-form (xscvqpsdz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPSWZ, 0xfc1f07fe, 0xfc090688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Signed Word format X-form (xscvqpswz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPUDZ, 0xfc1f07fe, 0xfc110688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Doubleword format X-form (xscvqpudz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVQPUWZ, 0xfc1f07fe, 0xfc010688, 0x1, // VSX Scalar Convert with round to zero Quad-Precision to Unsigned Word format X-form (xscvqpuwz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVSDQP, 0xfc1f07fe, 0xfc0a0688, 0x1, // VSX Scalar Convert Signed Doubleword to Quad-Precision format X-form (xscvsdqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSCVUDQP, 0xfc1f07fe, 0xfc020688, 0x1, // VSX Scalar Convert Unsigned Doubleword to Quad-Precision format X-form (xscvudqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSDIVQP, 0xfc0007ff, 0xfc000448, 0x0, // VSX Scalar Divide Quad-Precision [using round to Odd] X-form (xsdivqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSDIVQPO, 0xfc0007ff, 0xfc000449, 0x0, // VSX Scalar Divide Quad-Precision [using round to Odd] X-form (xsdivqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSIEXPDP, 0xfc0007fe, 0xf000072c, 0x0, // VSX Scalar Insert Exponent Double-Precision X-form (xsiexpdp XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{XSIEXPQP, 0xfc0007fe, 0xfc0006c8, 0x1, // VSX Scalar Insert Exponent Quad-Precision X-form (xsiexpqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMADDQP, 0xfc0007ff, 0xfc000308, 0x0, // VSX Scalar Multiply-Add Quad-Precision [using round to Odd] X-form (xsmaddqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMADDQPO, 0xfc0007ff, 0xfc000309, 0x0, // VSX Scalar Multiply-Add Quad-Precision [using round to Odd] X-form (xsmaddqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMAXCDP, 0xfc0007f8, 0xf0000400, 0x0, // VSX Scalar Maximum Type-C Double-Precision XX3-form (xsmaxcdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMAXJDP, 0xfc0007f8, 0xf0000480, 0x0, // VSX Scalar Maximum Type-J Double-Precision XX3-form (xsmaxjdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMINCDP, 0xfc0007f8, 0xf0000440, 0x0, // VSX Scalar Minimum Type-C Double-Precision XX3-form (xsmincdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMINJDP, 0xfc0007f8, 0xf00004c0, 0x0, // VSX Scalar Minimum Type-J Double-Precision XX3-form (xsminjdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMSUBQP, 0xfc0007ff, 0xfc000348, 0x0, // VSX Scalar Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsmsubqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMSUBQPO, 0xfc0007ff, 0xfc000349, 0x0, // VSX Scalar Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsmsubqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMULQP, 0xfc0007ff, 0xfc000048, 0x0, // VSX Scalar Multiply Quad-Precision [using round to Odd] X-form (xsmulqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSMULQPO, 0xfc0007ff, 0xfc000049, 0x0, // VSX Scalar Multiply Quad-Precision [using round to Odd] X-form (xsmulqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSNABSQP, 0xfc1f07fe, 0xfc080648, 0x0, // VSX Scalar Negative Absolute Quad-Precision X-form (xsnabsqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSNEGQP, 0xfc1f07fe, 0xfc100648, 0x1, // VSX Scalar Negate Quad-Precision X-form (xsnegqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSNMADDQP, 0xfc0007ff, 0xfc000388, 0x0, // VSX Scalar Negative Multiply-Add Quad-Precision [using round to Odd] X-form (xsnmaddqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSNMADDQPO, 0xfc0007ff, 0xfc000389, 0x0, // VSX Scalar Negative Multiply-Add Quad-Precision [using round to Odd] X-form (xsnmaddqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSNMSUBQP, 0xfc0007ff, 0xfc0003c8, 0x0, // VSX Scalar Negative Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsnmsubqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSNMSUBQPO, 0xfc0007ff, 0xfc0003c9, 0x0, // VSX Scalar Negative Multiply-Subtract Quad-Precision [using round to Odd] X-form (xsnmsubqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSRQPI, 0xfc0001ff, 0xfc00000a, 0x1e0000, // VSX Scalar Round to Quad-Precision Integer [with Inexact] Z23-form (xsrqpi R,VRT,VRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
 	{XSRQPIX, 0xfc0001ff, 0xfc00000b, 0x1e0000, // VSX Scalar Round to Quad-Precision Integer [with Inexact] Z23-form (xsrqpix R,VRT,VRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
 	{XSRQPXP, 0xfc0001fe, 0xfc00004a, 0x1e0001, // VSX Scalar Round Quad-Precision to Double-Extended Precision Z23-form (xsrqpxp R,VRT,VRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_21_22}},
 	{XSSQRTQP, 0xfc1f07ff, 0xfc1b0648, 0x0, // VSX Scalar Square Root Quad-Precision [using round to Odd] X-form (xssqrtqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSSQRTQPO, 0xfc1f07ff, 0xfc1b0649, 0x0, // VSX Scalar Square Root Quad-Precision [using round to Odd] X-form (xssqrtqpo VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSSUBQP, 0xfc0007ff, 0xfc000408, 0x0, // VSX Scalar Subtract Quad-Precision [using round to Odd] X-form (xssubqp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSSUBQPO, 0xfc0007ff, 0xfc000409, 0x0, // VSX Scalar Subtract Quad-Precision [using round to Odd] X-form (xssubqpo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{XSTSTDCDP, 0xfc0007fc, 0xf00005a8, 0x1, // VSX Scalar Test Data Class Double-Precision XX2-form (xststdcdp BF,XB,DCMX)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}},
 	{XSTSTDCQP, 0xfc0007fe, 0xfc000588, 0x1, // VSX Scalar Test Data Class Quad-Precision X-form (xststdcqp BF,VRB,DCMX)
-		[5]*argField{ap_CondRegField_6_8, ap_VecReg_16_20, ap_ImmUnsigned_9_15}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecReg_16_20, ap_ImmUnsigned_9_15}},
 	{XSTSTDCSP, 0xfc0007fc, 0xf00004a8, 0x1, // VSX Scalar Test Data Class Single-Precision XX2-form (xststdcsp BF,XB,DCMX)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_9_15}},
 	{XSXEXPDP, 0xfc1f07fc, 0xf000056c, 0x1, // VSX Scalar Extract Exponent Double-Precision XX2-form (xsxexpdp RT,XB)
-		[5]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}},
 	{XSXEXPQP, 0xfc1f07fe, 0xfc020648, 0x1, // VSX Scalar Extract Exponent Quad-Precision X-form (xsxexpqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSXSIGDP, 0xfc1f07fc, 0xf001056c, 0x1, // VSX Scalar Extract Significand Double-Precision XX2-form (xsxsigdp RT,XB)
-		[5]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_VecSReg_30_30_16_20}},
 	{XSXSIGQP, 0xfc1f07fe, 0xfc120648, 0x1, // VSX Scalar Extract Significand Quad-Precision X-form (xsxsigqp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XVCVHPSP, 0xfc1f07fc, 0xf018076c, 0x0, // VSX Vector Convert Half-Precision to Single-Precision format XX2-form (xvcvhpsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPHP, 0xfc1f07fc, 0xf019076c, 0x0, // VSX Vector Convert with round Single-Precision to Half-Precision format XX2-form (xvcvsphp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVIEXPDP, 0xfc0007f8, 0xf00007c0, 0x0, // VSX Vector Insert Exponent Double-Precision XX3-form (xviexpdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVIEXPSP, 0xfc0007f8, 0xf00006c0, 0x0, // VSX Vector Insert Exponent Single-Precision XX3-form (xviexpsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVTSTDCDP, 0xfc0007b8, 0xf00007a8, 0x0, // VSX Vector Test Data Class Double-Precision XX2-form (xvtstdcdp XT,XB,DCMX)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}},
 	{XVTSTDCSP, 0xfc0007b8, 0xf00006a8, 0x0, // VSX Vector Test Data Class Single-Precision XX2-form (xvtstdcsp XT,XB,DCMX)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_25_25_29_29_11_15}},
 	{XVXEXPDP, 0xfc1f07fc, 0xf000076c, 0x0, // VSX Vector Extract Exponent Double-Precision XX2-form (xvxexpdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVXEXPSP, 0xfc1f07fc, 0xf008076c, 0x0, // VSX Vector Extract Exponent Single-Precision XX2-form (xvxexpsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVXSIGDP, 0xfc1f07fc, 0xf001076c, 0x0, // VSX Vector Extract Significand Double-Precision XX2-form (xvxsigdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVXSIGSP, 0xfc1f07fc, 0xf009076c, 0x0, // VSX Vector Extract Significand Single-Precision XX2-form (xvxsigsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XXBRD, 0xfc1f07fc, 0xf017076c, 0x0, // VSX Vector Byte-Reverse Doubleword XX2-form (xxbrd XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XXBRH, 0xfc1f07fc, 0xf007076c, 0x0, // VSX Vector Byte-Reverse Halfword XX2-form (xxbrh XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XXBRQ, 0xfc1f07fc, 0xf01f076c, 0x0, // VSX Vector Byte-Reverse Quadword XX2-form (xxbrq XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XXBRW, 0xfc1f07fc, 0xf00f076c, 0x0, // VSX Vector Byte-Reverse Word XX2-form (xxbrw XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XXEXTRACTUW, 0xfc0007fc, 0xf0000294, 0x100000, // VSX Vector Extract Unsigned Word XX2-form (xxextractuw XT,XB,UIM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}},
 	{XXINSERTW, 0xfc0007fc, 0xf00002d4, 0x100000, // VSX Vector Insert Word XX2-form (xxinsertw XT,XB,UIM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_12_15}},
 	{XXPERM, 0xfc0007f8, 0xf00000d0, 0x0, // VSX Vector Permute XX3-form (xxperm XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXPERMR, 0xfc0007f8, 0xf00001d0, 0x0, // VSX Vector Permute Right-indexed XX3-form (xxpermr XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXSPLTIB, 0xfc1807fe, 0xf00002d0, 0x0, // VSX Vector Splat Immediate Byte X-form (xxspltib XT,IMM8)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_13_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_ImmUnsigned_13_20}},
 	{BCDADDCC, 0xfc0005ff, 0x10000401, 0x0, // Decimal Add Modulo VX-form (bcdadd. VRT,VRA,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCDSUBCC, 0xfc0005ff, 0x10000441, 0x0, // Decimal Subtract Modulo VX-form (bcdsub. VRT,VRA,VRB,PS)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_22}},
 	{BCTAR, 0xfc0007ff, 0x4c000460, 0xe000, // Branch Conditional to Branch Target Address Register XL-form (bctar BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{BCTARL, 0xfc0007ff, 0x4c000461, 0xe000, // Branch Conditional to Branch Target Address Register XL-form (bctarl BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{CLRBHRB, 0xfc0007fe, 0x7c00035c, 0x3fff801, // Clear BHRB X-form (clrbhrb)
-		[5]*argField{}},
+		[6]*argField{}},
 	{FMRGEW, 0xfc0007fe, 0xfc00078c, 0x1, // Floating Merge Even Word X-form (fmrgew FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FMRGOW, 0xfc0007fe, 0xfc00068c, 0x1, // Floating Merge Odd Word X-form (fmrgow FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{ICBT, 0xfc0007fe, 0x7c00002c, 0x2000001, // Instruction Cache Block Touch X-form (icbt CT, RA, RB)
-		[5]*argField{ap_ImmUnsigned_7_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_ImmUnsigned_7_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LQARX, 0xfc0007fe, 0x7c000228, 0x0, // Load Quadword And Reserve Indexed X-form (lqarx RTp,RA,RB,EH)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
 	{LXSIWAX, 0xfc0007fe, 0x7c000098, 0x0, // Load VSX Scalar as Integer Word Algebraic Indexed X-form (lxsiwax XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXSIWZX, 0xfc0007fe, 0x7c000018, 0x0, // Load VSX Scalar as Integer Word & Zero Indexed X-form (lxsiwzx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXSSPX, 0xfc0007fe, 0x7c000418, 0x0, // Load VSX Scalar Single-Precision Indexed X-form (lxsspx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MFBHRBE, 0xfc0007fe, 0x7c00025c, 0x1, // Move From BHRB XFX-form (mfbhrbe RT,BHRBE)
-		[5]*argField{ap_Reg_6_10, ap_SpReg_11_20}},
+		[6]*argField{ap_Reg_6_10, ap_SpReg_11_20}},
 	{MFVSRD, 0xfc0007fe, 0x7c000066, 0xf800, // Move From VSR Doubleword X-form (mfvsrd RA,XS)
-		[5]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
 	{MFVSRWZ, 0xfc0007fe, 0x7c0000e6, 0xf800, // Move From VSR Word and Zero X-form (mfvsrwz RA,XS)
-		[5]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
 	{MSGCLR, 0xfc0007fe, 0x7c0001dc, 0x3ff0001, // Message Clear X-form (msgclr RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{MSGCLRP, 0xfc0007fe, 0x7c00015c, 0x3ff0001, // Message Clear Privileged X-form (msgclrp RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{MSGSND, 0xfc0007fe, 0x7c00019c, 0x3ff0001, // Message Send X-form (msgsnd RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{MSGSNDP, 0xfc0007fe, 0x7c00011c, 0x3ff0001, // Message Send Privileged X-form (msgsndp RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{MTVSRD, 0xfc0007fe, 0x7c000166, 0xf800, // Move To VSR Doubleword X-form (mtvsrd XT,RA)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
 	{MTVSRWA, 0xfc0007fe, 0x7c0001a6, 0xf800, // Move To VSR Word Algebraic X-form (mtvsrwa XT,RA)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
 	{MTVSRWZ, 0xfc0007fe, 0x7c0001e6, 0xf800, // Move To VSR Word and Zero X-form (mtvsrwz XT,RA)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
 	{RFEBB, 0xfc0007fe, 0x4c000124, 0x3fff001, // Return from Event Based Branch XL-form (rfebb S)
-		[5]*argField{ap_ImmUnsigned_20_20}},
+		[6]*argField{ap_ImmUnsigned_20_20}},
 	{STQCXCC, 0xfc0007ff, 0x7c00016d, 0x0, // Store Quadword Conditional Indexed X-form (stqcx. RSp,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXSIWX, 0xfc0007fe, 0x7c000118, 0x0, // Store VSX Scalar as Integer Word Indexed X-form (stxsiwx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXSSPX, 0xfc0007fe, 0x7c000518, 0x0, // Store VSX Scalar Single-Precision Indexed X-form (stxsspx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{VADDCUQ, 0xfc0007ff, 0x10000140, 0x0, // Vector Add & write Carry Unsigned Quadword VX-form (vaddcuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDECUQ, 0xfc00003f, 0x1000003d, 0x0, // Vector Add Extended & write Carry Unsigned Quadword VA-form (vaddecuq VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VADDEUQM, 0xfc00003f, 0x1000003c, 0x0, // Vector Add Extended Unsigned Quadword Modulo VA-form (vaddeuqm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VADDUDM, 0xfc0007ff, 0x100000c0, 0x0, // Vector Add Unsigned Doubleword Modulo VX-form (vaddudm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUQM, 0xfc0007ff, 0x10000100, 0x0, // Vector Add Unsigned Quadword Modulo VX-form (vadduqm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VBPERMQ, 0xfc0007ff, 0x1000054c, 0x0, // Vector Bit Permute Quadword VX-form (vbpermq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCIPHER, 0xfc0007ff, 0x10000508, 0x0, // Vector AES Cipher VX-form (vcipher VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCIPHERLAST, 0xfc0007ff, 0x10000509, 0x0, // Vector AES Cipher Last VX-form (vcipherlast VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCLZB, 0xfc0007ff, 0x10000702, 0x1f0000, // Vector Count Leading Zeros Byte VX-form (vclzb VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCLZD, 0xfc0007ff, 0x100007c2, 0x1f0000, // Vector Count Leading Zeros Doubleword VX-form (vclzd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCLZH, 0xfc0007ff, 0x10000742, 0x1f0000, // Vector Count Leading Zeros Halfword VX-form (vclzh VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCLZW, 0xfc0007ff, 0x10000782, 0x1f0000, // Vector Count Leading Zeros Word VX-form (vclzw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VCMPEQUD, 0xfc0007ff, 0x100000c7, 0x0, // Vector Compare Equal Unsigned Doubleword VC-form (vcmpequd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUDCC, 0xfc0007ff, 0x100004c7, 0x0, // Vector Compare Equal Unsigned Doubleword VC-form (vcmpequd. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSD, 0xfc0007ff, 0x100003c7, 0x0, // Vector Compare Greater Than Signed Doubleword VC-form (vcmpgtsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSDCC, 0xfc0007ff, 0x100007c7, 0x0, // Vector Compare Greater Than Signed Doubleword VC-form (vcmpgtsd. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUD, 0xfc0007ff, 0x100002c7, 0x0, // Vector Compare Greater Than Unsigned Doubleword VC-form (vcmpgtud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUDCC, 0xfc0007ff, 0x100006c7, 0x0, // Vector Compare Greater Than Unsigned Doubleword VC-form (vcmpgtud. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VEQV, 0xfc0007ff, 0x10000684, 0x0, // Vector Logical Equivalence VX-form (veqv VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VGBBD, 0xfc0007ff, 0x1000050c, 0x1f0000, // Vector Gather Bits by Bytes by Doubleword VX-form (vgbbd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VMAXSD, 0xfc0007ff, 0x100001c2, 0x0, // Vector Maximum Signed Doubleword VX-form (vmaxsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXUD, 0xfc0007ff, 0x100000c2, 0x0, // Vector Maximum Unsigned Doubleword VX-form (vmaxud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINSD, 0xfc0007ff, 0x100003c2, 0x0, // Vector Minimum Signed Doubleword VX-form (vminsd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINUD, 0xfc0007ff, 0x100002c2, 0x0, // Vector Minimum Unsigned Doubleword VX-form (vminud VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGEW, 0xfc0007ff, 0x1000078c, 0x0, // Vector Merge Even Word VX-form (vmrgew VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGOW, 0xfc0007ff, 0x1000068c, 0x0, // Vector Merge Odd Word VX-form (vmrgow VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULESW, 0xfc0007ff, 0x10000388, 0x0, // Vector Multiply Even Signed Word VX-form (vmulesw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULEUW, 0xfc0007ff, 0x10000288, 0x0, // Vector Multiply Even Unsigned Word VX-form (vmuleuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOSW, 0xfc0007ff, 0x10000188, 0x0, // Vector Multiply Odd Signed Word VX-form (vmulosw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOUW, 0xfc0007ff, 0x10000088, 0x0, // Vector Multiply Odd Unsigned Word VX-form (vmulouw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULUWM, 0xfc0007ff, 0x10000089, 0x0, // Vector Multiply Unsigned Word Modulo VX-form (vmuluwm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VNAND, 0xfc0007ff, 0x10000584, 0x0, // Vector Logical NAND VX-form (vnand VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VNCIPHER, 0xfc0007ff, 0x10000548, 0x0, // Vector AES Inverse Cipher VX-form (vncipher VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VNCIPHERLAST, 0xfc0007ff, 0x10000549, 0x0, // Vector AES Inverse Cipher Last VX-form (vncipherlast VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VORC, 0xfc0007ff, 0x10000544, 0x0, // Vector Logical OR with Complement VX-form (vorc VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPERMXOR, 0xfc00003f, 0x1000002d, 0x0, // Vector Permute & Exclusive-OR VA-form (vpermxor VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VPKSDSS, 0xfc0007ff, 0x100005ce, 0x0, // Vector Pack Signed Doubleword Signed Saturate VX-form (vpksdss VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKSDUS, 0xfc0007ff, 0x1000054e, 0x0, // Vector Pack Signed Doubleword Unsigned Saturate VX-form (vpksdus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUDUM, 0xfc0007ff, 0x1000044e, 0x0, // Vector Pack Unsigned Doubleword Unsigned Modulo VX-form (vpkudum VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUDUS, 0xfc0007ff, 0x100004ce, 0x0, // Vector Pack Unsigned Doubleword Unsigned Saturate VX-form (vpkudus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPMSUMB, 0xfc0007ff, 0x10000408, 0x0, // Vector Polynomial Multiply-Sum Byte VX-form (vpmsumb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPMSUMD, 0xfc0007ff, 0x100004c8, 0x0, // Vector Polynomial Multiply-Sum Doubleword VX-form (vpmsumd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPMSUMH, 0xfc0007ff, 0x10000448, 0x0, // Vector Polynomial Multiply-Sum Halfword VX-form (vpmsumh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPMSUMW, 0xfc0007ff, 0x10000488, 0x0, // Vector Polynomial Multiply-Sum Word VX-form (vpmsumw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPOPCNTB, 0xfc0007ff, 0x10000703, 0x1f0000, // Vector Population Count Byte VX-form (vpopcntb VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPOPCNTD, 0xfc0007ff, 0x100007c3, 0x1f0000, // Vector Population Count Doubleword VX-form (vpopcntd VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPOPCNTH, 0xfc0007ff, 0x10000743, 0x1f0000, // Vector Population Count Halfword VX-form (vpopcnth VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VPOPCNTW, 0xfc0007ff, 0x10000783, 0x1f0000, // Vector Population Count Word VX-form (vpopcntw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRLD, 0xfc0007ff, 0x100000c4, 0x0, // Vector Rotate Left Doubleword VX-form (vrld VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSBOX, 0xfc0007ff, 0x100005c8, 0xf800, // Vector AES SubBytes VX-form (vsbox VRT,VRA)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15}},
 	{VSHASIGMAD, 0xfc0007ff, 0x100006c2, 0x0, // Vector SHA-512 Sigma Doubleword VX-form (vshasigmad VRT,VRA,ST,SIX)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}},
 	{VSHASIGMAW, 0xfc0007ff, 0x10000682, 0x0, // Vector SHA-256 Sigma Word VX-form (vshasigmaw VRT,VRA,ST,SIX)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_ImmUnsigned_16_16, ap_ImmUnsigned_17_20}},
 	{VSLD, 0xfc0007ff, 0x100005c4, 0x0, // Vector Shift Left Doubleword VX-form (vsld VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRAD, 0xfc0007ff, 0x100003c4, 0x0, // Vector Shift Right Algebraic Doubleword VX-form (vsrad VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRD, 0xfc0007ff, 0x100006c4, 0x0, // Vector Shift Right Doubleword VX-form (vsrd VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBCUQ, 0xfc0007ff, 0x10000540, 0x0, // Vector Subtract & write Carry-out Unsigned Quadword VX-form (vsubcuq VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBECUQ, 0xfc00003f, 0x1000003f, 0x0, // Vector Subtract Extended & write Carry-out Unsigned Quadword VA-form (vsubecuq VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VSUBEUQM, 0xfc00003f, 0x1000003e, 0x0, // Vector Subtract Extended Unsigned Quadword Modulo VA-form (vsubeuqm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VSUBUDM, 0xfc0007ff, 0x100004c0, 0x0, // Vector Subtract Unsigned Doubleword Modulo VX-form (vsubudm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUQM, 0xfc0007ff, 0x10000500, 0x0, // Vector Subtract Unsigned Quadword Modulo VX-form (vsubuqm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VUPKHSW, 0xfc0007ff, 0x1000064e, 0x1f0000, // Vector Unpack High Signed Word VX-form (vupkhsw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKLSW, 0xfc0007ff, 0x100006ce, 0x1f0000, // Vector Unpack Low Signed Word VX-form (vupklsw VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{XSADDSP, 0xfc0007f8, 0xf0000000, 0x0, // VSX Scalar Add Single-Precision XX3-form (xsaddsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCVDPSPN, 0xfc0007fc, 0xf000042c, 0x1f0000, // VSX Scalar Convert Scalar Single-Precision to Vector Single-Precision format Non-signalling XX2-form (xscvdpspn XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVSPDPN, 0xfc0007fc, 0xf000052c, 0x1f0000, // VSX Scalar Convert Single-Precision to Double-Precision format Non-signalling XX2-form (xscvspdpn XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVSXDSP, 0xfc0007fc, 0xf00004e0, 0x1f0000, // VSX Scalar Convert with round Signed Doubleword to Single-Precision format XX2-form (xscvsxdsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVUXDSP, 0xfc0007fc, 0xf00004a0, 0x1f0000, // VSX Scalar Convert with round Unsigned Doubleword to Single-Precision XX2-form (xscvuxdsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSDIVSP, 0xfc0007f8, 0xf00000c0, 0x0, // VSX Scalar Divide Single-Precision XX3-form (xsdivsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMADDASP, 0xfc0007f8, 0xf0000008, 0x0, // VSX Scalar Multiply-Add Type-A Single-Precision XX3-form (xsmaddasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMADDMSP, 0xfc0007f8, 0xf0000048, 0x0, // VSX Scalar Multiply-Add Type-M Single-Precision XX3-form (xsmaddmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMSUBASP, 0xfc0007f8, 0xf0000088, 0x0, // VSX Scalar Multiply-Subtract Type-A Single-Precision XX3-form (xsmsubasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMSUBMSP, 0xfc0007f8, 0xf00000c8, 0x0, // VSX Scalar Multiply-Subtract Type-M Single-Precision XX3-form (xsmsubmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMULSP, 0xfc0007f8, 0xf0000080, 0x0, // VSX Scalar Multiply Single-Precision XX3-form (xsmulsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMADDASP, 0xfc0007f8, 0xf0000408, 0x0, // VSX Scalar Negative Multiply-Add Type-A Single-Precision XX3-form (xsnmaddasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMADDMSP, 0xfc0007f8, 0xf0000448, 0x0, // VSX Scalar Negative Multiply-Add Type-M Single-Precision XX3-form (xsnmaddmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMSUBASP, 0xfc0007f8, 0xf0000488, 0x0, // VSX Scalar Negative Multiply-Subtract Type-A Single-Precision XX3-form (xsnmsubasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMSUBMSP, 0xfc0007f8, 0xf00004c8, 0x0, // VSX Scalar Negative Multiply-Subtract Type-M Single-Precision XX3-form (xsnmsubmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSRESP, 0xfc0007fc, 0xf0000068, 0x1f0000, // VSX Scalar Reciprocal Estimate Single-Precision XX2-form (xsresp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRSP, 0xfc0007fc, 0xf0000464, 0x1f0000, // VSX Scalar Round to Single-Precision XX2-form (xsrsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRSQRTESP, 0xfc0007fc, 0xf0000028, 0x1f0000, // VSX Scalar Reciprocal Square Root Estimate Single-Precision XX2-form (xsrsqrtesp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSSQRTSP, 0xfc0007fc, 0xf000002c, 0x1f0000, // VSX Scalar Square Root Single-Precision XX2-form (xssqrtsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSSUBSP, 0xfc0007f8, 0xf0000040, 0x0, // VSX Scalar Subtract Single-Precision XX3-form (xssubsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLEQV, 0xfc0007f8, 0xf00005d0, 0x0, // VSX Vector Logical Equivalence XX3-form (xxleqv XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLNAND, 0xfc0007f8, 0xf0000590, 0x0, // VSX Vector Logical NAND XX3-form (xxlnand XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLORC, 0xfc0007f8, 0xf0000550, 0x0, // VSX Vector Logical OR with Complement XX3-form (xxlorc XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{ADDG6S, 0xfc0003fe, 0x7c000094, 0x401, // Add and Generate Sixes XO-form (addg6s RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{BPERMD, 0xfc0007fe, 0x7c0001f8, 0x1, // Bit Permute Doubleword X-form (bpermd RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{CBCDTD, 0xfc0007fe, 0x7c000274, 0xf801, // Convert Binary Coded Decimal To Declets X-form (cbcdtd RA, RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CDTBCD, 0xfc0007fe, 0x7c000234, 0xf801, // Convert Declets To Binary Coded Decimal X-form (cdtbcd RA, RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{DCFFIX, 0xfc0007ff, 0xec000644, 0x1f0000, // DFP Convert From Fixed X-form (dcffix FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCFFIXCC, 0xfc0007ff, 0xec000645, 0x1f0000, // DFP Convert From Fixed X-form (dcffix. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DIVDE, 0xfc0007ff, 0x7c000352, 0x0, // Divide Doubleword Extended XO-form (divde RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDECC, 0xfc0007ff, 0x7c000353, 0x0, // Divide Doubleword Extended XO-form (divde. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEO, 0xfc0007ff, 0x7c000752, 0x0, // Divide Doubleword Extended XO-form (divdeo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEOCC, 0xfc0007ff, 0x7c000753, 0x0, // Divide Doubleword Extended XO-form (divdeo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEU, 0xfc0007ff, 0x7c000312, 0x0, // Divide Doubleword Extended Unsigned XO-form (divdeu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEUCC, 0xfc0007ff, 0x7c000313, 0x0, // Divide Doubleword Extended Unsigned XO-form (divdeu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEUO, 0xfc0007ff, 0x7c000712, 0x0, // Divide Doubleword Extended Unsigned XO-form (divdeuo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDEUOCC, 0xfc0007ff, 0x7c000713, 0x0, // Divide Doubleword Extended Unsigned XO-form (divdeuo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWE, 0xfc0007ff, 0x7c000356, 0x0, // Divide Word Extended XO-form (divwe RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWECC, 0xfc0007ff, 0x7c000357, 0x0, // Divide Word Extended XO-form (divwe. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEO, 0xfc0007ff, 0x7c000756, 0x0, // Divide Word Extended XO-form (divweo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEOCC, 0xfc0007ff, 0x7c000757, 0x0, // Divide Word Extended XO-form (divweo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEU, 0xfc0007ff, 0x7c000316, 0x0, // Divide Word Extended Unsigned XO-form (divweu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEUCC, 0xfc0007ff, 0x7c000317, 0x0, // Divide Word Extended Unsigned XO-form (divweu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEUO, 0xfc0007ff, 0x7c000716, 0x0, // Divide Word Extended Unsigned XO-form (divweuo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWEUOCC, 0xfc0007ff, 0x7c000717, 0x0, // Divide Word Extended Unsigned XO-form (divweuo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{FCFIDS, 0xfc0007ff, 0xec00069c, 0x1f0000, // Floating Convert with round Signed Doubleword to Single-Precision format X-form (fcfids FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDSCC, 0xfc0007ff, 0xec00069d, 0x1f0000, // Floating Convert with round Signed Doubleword to Single-Precision format X-form (fcfids. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDU, 0xfc0007ff, 0xfc00079c, 0x1f0000, // Floating Convert with round Unsigned Doubleword to Double-Precision format X-form (fcfidu FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDUCC, 0xfc0007ff, 0xfc00079d, 0x1f0000, // Floating Convert with round Unsigned Doubleword to Double-Precision format X-form (fcfidu. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDUS, 0xfc0007ff, 0xec00079c, 0x1f0000, // Floating Convert with round Unsigned Doubleword to Single-Precision format X-form (fcfidus FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDUSCC, 0xfc0007ff, 0xec00079d, 0x1f0000, // Floating Convert with round Unsigned Doubleword to Single-Precision format X-form (fcfidus. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDU, 0xfc0007ff, 0xfc00075c, 0x1f0000, // Floating Convert with round Double-Precision To Unsigned Doubleword format X-form (fctidu FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDUCC, 0xfc0007ff, 0xfc00075d, 0x1f0000, // Floating Convert with round Double-Precision To Unsigned Doubleword format X-form (fctidu. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDUZ, 0xfc0007ff, 0xfc00075e, 0x1f0000, // Floating Convert with truncate Double-Precision To Unsigned Doubleword format X-form (fctiduz FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDUZCC, 0xfc0007ff, 0xfc00075f, 0x1f0000, // Floating Convert with truncate Double-Precision To Unsigned Doubleword format X-form (fctiduz. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWU, 0xfc0007ff, 0xfc00011c, 0x1f0000, // Floating Convert with round Double-Precision To Unsigned Word format X-form (fctiwu FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWUCC, 0xfc0007ff, 0xfc00011d, 0x1f0000, // Floating Convert with round Double-Precision To Unsigned Word format X-form (fctiwu. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWUZ, 0xfc0007ff, 0xfc00011e, 0x1f0000, // Floating Convert with truncate Double-Precision To Unsigned Word format X-form (fctiwuz FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWUZCC, 0xfc0007ff, 0xfc00011f, 0x1f0000, // Floating Convert with truncate Double-Precision To Unsigned Word format X-form (fctiwuz. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FTDIV, 0xfc0007fe, 0xfc000100, 0x600001, // Floating Test for software Divide X-form (ftdiv BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FTSQRT, 0xfc0007fe, 0xfc000140, 0x7f0001, // Floating Test for software Square Root X-form (ftsqrt BF,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_16_20}},
 	{LBARX, 0xfc0007fe, 0x7c000068, 0x0, // Load Byte And Reserve Indexed X-form (lbarx RT,RA,RB,EH)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
 	{LDBRX, 0xfc0007fe, 0x7c000428, 0x1, // Load Doubleword Byte-Reverse Indexed X-form (ldbrx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFIWZX, 0xfc0007fe, 0x7c0006ee, 0x1, // Load Floating-Point as Integer Word & Zero Indexed X-form (lfiwzx FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHARX, 0xfc0007fe, 0x7c0000e8, 0x0, // Load Halfword And Reserve Indexed Xform (lharx RT,RA,RB,EH)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
 	{LXSDX, 0xfc0007fe, 0x7c000498, 0x0, // Load VSX Scalar Doubleword Indexed X-form (lxsdx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVD2X, 0xfc0007fe, 0x7c000698, 0x0, // Load VSX Vector Doubleword*2 Indexed X-form (lxvd2x XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVDSX, 0xfc0007fe, 0x7c000298, 0x0, // Load VSX Vector Doubleword & Splat Indexed X-form (lxvdsx XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LXVW4X, 0xfc0007fe, 0x7c000618, 0x0, // Load VSX Vector Word*4 Indexed X-form (lxvw4x XT,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{POPCNTD, 0xfc0007fe, 0x7c0003f4, 0xf801, // Population Count Doubleword X-form (popcntd RA, RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{POPCNTW, 0xfc0007fe, 0x7c0002f4, 0xf801, // Population Count Words X-form (popcntw RA, RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{STBCXCC, 0xfc0007ff, 0x7c00056d, 0x0, // Store Byte Conditional Indexed X-form (stbcx. RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STDBRX, 0xfc0007fe, 0x7c000528, 0x1, // Store Doubleword Byte-Reverse Indexed X-form (stdbrx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STHCXCC, 0xfc0007ff, 0x7c0005ad, 0x0, // Store Halfword Conditional Indexed X-form (sthcx. RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXSDX, 0xfc0007fe, 0x7c000598, 0x0, // Store VSX Scalar Doubleword Indexed X-form (stxsdx XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVD2X, 0xfc0007fe, 0x7c000798, 0x0, // Store VSX Vector Doubleword*2 Indexed X-form (stxvd2x XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STXVW4X, 0xfc0007fe, 0x7c000718, 0x0, // Store VSX Vector Word*4 Indexed X-form (stxvw4x XS,RA,RB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{XSABSDP, 0xfc0007fc, 0xf0000564, 0x1f0000, // VSX Scalar Absolute Double-Precision XX2-form (xsabsdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSADDDP, 0xfc0007f8, 0xf0000100, 0x0, // VSX Scalar Add Double-Precision XX3-form (xsadddp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPODP, 0xfc0007f8, 0xf0000158, 0x600001, // VSX Scalar Compare Ordered Double-Precision XX3-form (xscmpodp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCMPUDP, 0xfc0007f8, 0xf0000118, 0x600001, // VSX Scalar Compare Unordered Double-Precision XX3-form (xscmpudp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCPSGNDP, 0xfc0007f8, 0xf0000580, 0x0, // VSX Scalar Copy Sign Double-Precision XX3-form (xscpsgndp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSCVDPSP, 0xfc0007fc, 0xf0000424, 0x1f0000, // VSX Scalar Convert with round Double-Precision to Single-Precision format XX2-form (xscvdpsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVDPSXDS, 0xfc0007fc, 0xf0000560, 0x1f0000, // VSX Scalar Convert with round to zero Double-Precision to Signed Doubleword format XX2-form (xscvdpsxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVDPSXWS, 0xfc0007fc, 0xf0000160, 0x1f0000, // VSX Scalar Convert with round to zero Double-Precision to Signed Word format XX2-form (xscvdpsxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVDPUXDS, 0xfc0007fc, 0xf0000520, 0x1f0000, // VSX Scalar Convert with round to zero Double-Precision to Unsigned Doubleword format XX2-form (xscvdpuxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVDPUXWS, 0xfc0007fc, 0xf0000120, 0x1f0000, // VSX Scalar Convert with round to zero Double-Precision to Unsigned Word format XX2-form (xscvdpuxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVSPDP, 0xfc0007fc, 0xf0000524, 0x1f0000, // VSX Scalar Convert Single-Precision to Double-Precision format XX2-form (xscvspdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVSXDDP, 0xfc0007fc, 0xf00005e0, 0x1f0000, // VSX Scalar Convert with round Signed Doubleword to Double-Precision format XX2-form (xscvsxddp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSCVUXDDP, 0xfc0007fc, 0xf00005a0, 0x1f0000, // VSX Scalar Convert with round Unsigned Doubleword to Double-Precision format XX2-form (xscvuxddp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSDIVDP, 0xfc0007f8, 0xf00001c0, 0x0, // VSX Scalar Divide Double-Precision XX3-form (xsdivdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMADDADP, 0xfc0007f8, 0xf0000108, 0x0, // VSX Scalar Multiply-Add Type-A Double-Precision XX3-form (xsmaddadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMADDMDP, 0xfc0007f8, 0xf0000148, 0x0, // VSX Scalar Multiply-Add Type-M Double-Precision XX3-form (xsmaddmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMAXDP, 0xfc0007f8, 0xf0000500, 0x0, // VSX Scalar Maximum Double-Precision XX3-form (xsmaxdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMINDP, 0xfc0007f8, 0xf0000540, 0x0, // VSX Scalar Minimum Double-Precision XX3-form (xsmindp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMSUBADP, 0xfc0007f8, 0xf0000188, 0x0, // VSX Scalar Multiply-Subtract Type-A Double-Precision XX3-form (xsmsubadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMSUBMDP, 0xfc0007f8, 0xf00001c8, 0x0, // VSX Scalar Multiply-Subtract Type-M Double-Precision XX3-form (xsmsubmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSMULDP, 0xfc0007f8, 0xf0000180, 0x0, // VSX Scalar Multiply Double-Precision XX3-form (xsmuldp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNABSDP, 0xfc0007fc, 0xf00005a4, 0x1f0000, // VSX Scalar Negative Absolute Double-Precision XX2-form (xsnabsdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSNEGDP, 0xfc0007fc, 0xf00005e4, 0x1f0000, // VSX Scalar Negate Double-Precision XX2-form (xsnegdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSNMADDADP, 0xfc0007f8, 0xf0000508, 0x0, // VSX Scalar Negative Multiply-Add Type-A Double-Precision XX3-form (xsnmaddadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMADDMDP, 0xfc0007f8, 0xf0000548, 0x0, // VSX Scalar Negative Multiply-Add Type-M Double-Precision XX3-form (xsnmaddmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMSUBADP, 0xfc0007f8, 0xf0000588, 0x0, // VSX Scalar Negative Multiply-Subtract Type-A Double-Precision XX3-form (xsnmsubadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSNMSUBMDP, 0xfc0007f8, 0xf00005c8, 0x0, // VSX Scalar Negative Multiply-Subtract Type-M Double-Precision XX3-form (xsnmsubmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSRDPI, 0xfc0007fc, 0xf0000124, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round to Nearest Away XX2-form (xsrdpi XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRDPIC, 0xfc0007fc, 0xf00001ac, 0x1f0000, // VSX Scalar Round to Double-Precision Integer exact using Current rounding mode XX2-form (xsrdpic XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRDPIM, 0xfc0007fc, 0xf00001e4, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward -Infinity XX2-form (xsrdpim XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRDPIP, 0xfc0007fc, 0xf00001a4, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward +Infinity XX2-form (xsrdpip XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRDPIZ, 0xfc0007fc, 0xf0000164, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward Zero XX2-form (xsrdpiz XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSREDP, 0xfc0007fc, 0xf0000168, 0x1f0000, // VSX Scalar Reciprocal Estimate Double-Precision XX2-form (xsredp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSRSQRTEDP, 0xfc0007fc, 0xf0000128, 0x1f0000, // VSX Scalar Reciprocal Square Root Estimate Double-Precision XX2-form (xsrsqrtedp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSSQRTDP, 0xfc0007fc, 0xf000012c, 0x1f0000, // VSX Scalar Square Root Double-Precision XX2-form (xssqrtdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XSSUBDP, 0xfc0007f8, 0xf0000140, 0x0, // VSX Scalar Subtract Double-Precision XX3-form (xssubdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSTDIVDP, 0xfc0007f8, 0xf00001e8, 0x600001, // VSX Scalar Test for software Divide Double-Precision XX3-form (xstdivdp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XSTSQRTDP, 0xfc0007fc, 0xf00001a8, 0x7f0001, // VSX Scalar Test for software Square Root Double-Precision XX2-form (xstsqrtdp BF,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
 	{XVABSDP, 0xfc0007fc, 0xf0000764, 0x1f0000, // VSX Vector Absolute Value Double-Precision XX2-form (xvabsdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVABSSP, 0xfc0007fc, 0xf0000664, 0x1f0000, // VSX Vector Absolute Value Single-Precision XX2-form (xvabssp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVADDDP, 0xfc0007f8, 0xf0000300, 0x0, // VSX Vector Add Double-Precision XX3-form (xvadddp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVADDSP, 0xfc0007f8, 0xf0000200, 0x0, // VSX Vector Add Single-Precision XX3-form (xvaddsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPEQDP, 0xfc0007f8, 0xf0000318, 0x0, // VSX Vector Compare Equal To Double-Precision XX3-form (xvcmpeqdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPEQDPCC, 0xfc0007f8, 0xf0000718, 0x0, // VSX Vector Compare Equal To Double-Precision XX3-form (xvcmpeqdp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPEQSP, 0xfc0007f8, 0xf0000218, 0x0, // VSX Vector Compare Equal To Single-Precision XX3-form (xvcmpeqsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPEQSPCC, 0xfc0007f8, 0xf0000618, 0x0, // VSX Vector Compare Equal To Single-Precision XX3-form (xvcmpeqsp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGEDP, 0xfc0007f8, 0xf0000398, 0x0, // VSX Vector Compare Greater Than or Equal To Double-Precision XX3-form (xvcmpgedp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGEDPCC, 0xfc0007f8, 0xf0000798, 0x0, // VSX Vector Compare Greater Than or Equal To Double-Precision XX3-form (xvcmpgedp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGESP, 0xfc0007f8, 0xf0000298, 0x0, // VSX Vector Compare Greater Than or Equal To Single-Precision XX3-form (xvcmpgesp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGESPCC, 0xfc0007f8, 0xf0000698, 0x0, // VSX Vector Compare Greater Than or Equal To Single-Precision XX3-form (xvcmpgesp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGTDP, 0xfc0007f8, 0xf0000358, 0x0, // VSX Vector Compare Greater Than Double-Precision XX3-form (xvcmpgtdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGTDPCC, 0xfc0007f8, 0xf0000758, 0x0, // VSX Vector Compare Greater Than Double-Precision XX3-form (xvcmpgtdp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGTSP, 0xfc0007f8, 0xf0000258, 0x0, // VSX Vector Compare Greater Than Single-Precision XX3-form (xvcmpgtsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCMPGTSPCC, 0xfc0007f8, 0xf0000658, 0x0, // VSX Vector Compare Greater Than Single-Precision XX3-form (xvcmpgtsp. XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCPSGNDP, 0xfc0007f8, 0xf0000780, 0x0, // VSX Vector Copy Sign Double-Precision XX3-form (xvcpsgndp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCPSGNSP, 0xfc0007f8, 0xf0000680, 0x0, // VSX Vector Copy Sign Single-Precision XX3-form (xvcpsgnsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVCVDPSP, 0xfc0007fc, 0xf0000624, 0x1f0000, // VSX Vector Convert with round Double-Precision to Single-Precision format XX2-form (xvcvdpsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVDPSXDS, 0xfc0007fc, 0xf0000760, 0x1f0000, // VSX Vector Convert with round to zero Double-Precision to Signed Doubleword format XX2-form (xvcvdpsxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVDPSXWS, 0xfc0007fc, 0xf0000360, 0x1f0000, // VSX Vector Convert with round to zero Double-Precision to Signed Word format XX2-form (xvcvdpsxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVDPUXDS, 0xfc0007fc, 0xf0000720, 0x1f0000, // VSX Vector Convert with round to zero Double-Precision to Unsigned Doubleword format XX2-form (xvcvdpuxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVDPUXWS, 0xfc0007fc, 0xf0000320, 0x1f0000, // VSX Vector Convert with round to zero Double-Precision to Unsigned Word format XX2-form (xvcvdpuxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPDP, 0xfc0007fc, 0xf0000724, 0x1f0000, // VSX Vector Convert Single-Precision to Double-Precision format XX2-form (xvcvspdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPSXDS, 0xfc0007fc, 0xf0000660, 0x1f0000, // VSX Vector Convert with round to zero Single-Precision to Signed Doubleword format XX2-form (xvcvspsxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPSXWS, 0xfc0007fc, 0xf0000260, 0x1f0000, // VSX Vector Convert with round to zero Single-Precision to Signed Word format XX2-form (xvcvspsxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPUXDS, 0xfc0007fc, 0xf0000620, 0x1f0000, // VSX Vector Convert with round to zero Single-Precision to Unsigned Doubleword format XX2-form (xvcvspuxds XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSPUXWS, 0xfc0007fc, 0xf0000220, 0x1f0000, // VSX Vector Convert with round to zero Single-Precision to Unsigned Word format XX2-form (xvcvspuxws XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSXDDP, 0xfc0007fc, 0xf00007e0, 0x1f0000, // VSX Vector Convert with round Signed Doubleword to Double-Precision format XX2-form (xvcvsxddp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSXDSP, 0xfc0007fc, 0xf00006e0, 0x1f0000, // VSX Vector Convert with round Signed Doubleword to Single-Precision format XX2-form (xvcvsxdsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSXWDP, 0xfc0007fc, 0xf00003e0, 0x1f0000, // VSX Vector Convert Signed Word to Double-Precision format XX2-form (xvcvsxwdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVSXWSP, 0xfc0007fc, 0xf00002e0, 0x1f0000, // VSX Vector Convert with round Signed Word to Single-Precision format XX2-form (xvcvsxwsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVUXDDP, 0xfc0007fc, 0xf00007a0, 0x1f0000, // VSX Vector Convert with round Unsigned Doubleword to Double-Precision format XX2-form (xvcvuxddp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVUXDSP, 0xfc0007fc, 0xf00006a0, 0x1f0000, // VSX Vector Convert with round Unsigned Doubleword to Single-Precision format XX2-form (xvcvuxdsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVUXWDP, 0xfc0007fc, 0xf00003a0, 0x1f0000, // VSX Vector Convert Unsigned Word to Double-Precision format XX2-form (xvcvuxwdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVCVUXWSP, 0xfc0007fc, 0xf00002a0, 0x1f0000, // VSX Vector Convert with round Unsigned Word to Single-Precision format XX2-form (xvcvuxwsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVDIVDP, 0xfc0007f8, 0xf00003c0, 0x0, // VSX Vector Divide Double-Precision XX3-form (xvdivdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVDIVSP, 0xfc0007f8, 0xf00002c0, 0x0, // VSX Vector Divide Single-Precision XX3-form (xvdivsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMADDADP, 0xfc0007f8, 0xf0000308, 0x0, // VSX Vector Multiply-Add Type-A Double-Precision XX3-form (xvmaddadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMADDASP, 0xfc0007f8, 0xf0000208, 0x0, // VSX Vector Multiply-Add Type-A Single-Precision XX3-form (xvmaddasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMADDMDP, 0xfc0007f8, 0xf0000348, 0x0, // VSX Vector Multiply-Add Type-M Double-Precision XX3-form (xvmaddmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMADDMSP, 0xfc0007f8, 0xf0000248, 0x0, // VSX Vector Multiply-Add Type-M Single-Precision XX3-form (xvmaddmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMAXDP, 0xfc0007f8, 0xf0000700, 0x0, // VSX Vector Maximum Double-Precision XX3-form (xvmaxdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMAXSP, 0xfc0007f8, 0xf0000600, 0x0, // VSX Vector Maximum Single-Precision XX3-form (xvmaxsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMINDP, 0xfc0007f8, 0xf0000740, 0x0, // VSX Vector Minimum Double-Precision XX3-form (xvmindp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMINSP, 0xfc0007f8, 0xf0000640, 0x0, // VSX Vector Minimum Single-Precision XX3-form (xvminsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMSUBADP, 0xfc0007f8, 0xf0000388, 0x0, // VSX Vector Multiply-Subtract Type-A Double-Precision XX3-form (xvmsubadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMSUBASP, 0xfc0007f8, 0xf0000288, 0x0, // VSX Vector Multiply-Subtract Type-A Single-Precision XX3-form (xvmsubasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMSUBMDP, 0xfc0007f8, 0xf00003c8, 0x0, // VSX Vector Multiply-Subtract Type-M Double-Precision XX3-form (xvmsubmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMSUBMSP, 0xfc0007f8, 0xf00002c8, 0x0, // VSX Vector Multiply-Subtract Type-M Single-Precision XX3-form (xvmsubmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMULDP, 0xfc0007f8, 0xf0000380, 0x0, // VSX Vector Multiply Double-Precision XX3-form (xvmuldp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVMULSP, 0xfc0007f8, 0xf0000280, 0x0, // VSX Vector Multiply Single-Precision XX3-form (xvmulsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNABSDP, 0xfc0007fc, 0xf00007a4, 0x1f0000, // VSX Vector Negative Absolute Double-Precision XX2-form (xvnabsdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVNABSSP, 0xfc0007fc, 0xf00006a4, 0x1f0000, // VSX Vector Negative Absolute Single-Precision XX2-form (xvnabssp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVNEGDP, 0xfc0007fc, 0xf00007e4, 0x1f0000, // VSX Vector Negate Double-Precision XX2-form (xvnegdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVNEGSP, 0xfc0007fc, 0xf00006e4, 0x1f0000, // VSX Vector Negate Single-Precision XX2-form (xvnegsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVNMADDADP, 0xfc0007f8, 0xf0000708, 0x0, // VSX Vector Negative Multiply-Add Type-A Double-Precision XX3-form (xvnmaddadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMADDASP, 0xfc0007f8, 0xf0000608, 0x0, // VSX Vector Negative Multiply-Add Type-A Single-Precision XX3-form (xvnmaddasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMADDMDP, 0xfc0007f8, 0xf0000748, 0x0, // VSX Vector Negative Multiply-Add Type-M Double-Precision XX3-form (xvnmaddmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMADDMSP, 0xfc0007f8, 0xf0000648, 0x0, // VSX Vector Negative Multiply-Add Type-M Single-Precision XX3-form (xvnmaddmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMSUBADP, 0xfc0007f8, 0xf0000788, 0x0, // VSX Vector Negative Multiply-Subtract Type-A Double-Precision XX3-form (xvnmsubadp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMSUBASP, 0xfc0007f8, 0xf0000688, 0x0, // VSX Vector Negative Multiply-Subtract Type-A Single-Precision XX3-form (xvnmsubasp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMSUBMDP, 0xfc0007f8, 0xf00007c8, 0x0, // VSX Vector Negative Multiply-Subtract Type-M Double-Precision XX3-form (xvnmsubmdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVNMSUBMSP, 0xfc0007f8, 0xf00006c8, 0x0, // VSX Vector Negative Multiply-Subtract Type-M Single-Precision XX3-form (xvnmsubmsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVRDPI, 0xfc0007fc, 0xf0000324, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round to Nearest Away XX2-form (xvrdpi XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRDPIC, 0xfc0007fc, 0xf00003ac, 0x1f0000, // VSX Vector Round to Double-Precision Integer Exact using Current rounding mode XX2-form (xvrdpic XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRDPIM, 0xfc0007fc, 0xf00003e4, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward -Infinity XX2-form (xvrdpim XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRDPIP, 0xfc0007fc, 0xf00003a4, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward +Infinity XX2-form (xvrdpip XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRDPIZ, 0xfc0007fc, 0xf0000364, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward Zero XX2-form (xvrdpiz XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVREDP, 0xfc0007fc, 0xf0000368, 0x1f0000, // VSX Vector Reciprocal Estimate Double-Precision XX2-form (xvredp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRESP, 0xfc0007fc, 0xf0000268, 0x1f0000, // VSX Vector Reciprocal Estimate Single-Precision XX2-form (xvresp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSPI, 0xfc0007fc, 0xf0000224, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round to Nearest Away XX2-form (xvrspi XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSPIC, 0xfc0007fc, 0xf00002ac, 0x1f0000, // VSX Vector Round to Single-Precision Integer Exact using Current rounding mode XX2-form (xvrspic XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSPIM, 0xfc0007fc, 0xf00002e4, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward -Infinity XX2-form (xvrspim XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSPIP, 0xfc0007fc, 0xf00002a4, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward +Infinity XX2-form (xvrspip XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSPIZ, 0xfc0007fc, 0xf0000264, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward Zero XX2-form (xvrspiz XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSQRTEDP, 0xfc0007fc, 0xf0000328, 0x1f0000, // VSX Vector Reciprocal Square Root Estimate Double-Precision XX2-form (xvrsqrtedp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVRSQRTESP, 0xfc0007fc, 0xf0000228, 0x1f0000, // VSX Vector Reciprocal Square Root Estimate Single-Precision XX2-form (xvrsqrtesp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVSQRTDP, 0xfc0007fc, 0xf000032c, 0x1f0000, // VSX Vector Square Root Double-Precision XX2-form (xvsqrtdp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVSQRTSP, 0xfc0007fc, 0xf000022c, 0x1f0000, // VSX Vector Square Root Single-Precision XX2-form (xvsqrtsp XT,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
 	{XVSUBDP, 0xfc0007f8, 0xf0000340, 0x0, // VSX Vector Subtract Double-Precision XX3-form (xvsubdp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVSUBSP, 0xfc0007f8, 0xf0000240, 0x0, // VSX Vector Subtract Single-Precision XX3-form (xvsubsp XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVTDIVDP, 0xfc0007f8, 0xf00003e8, 0x600001, // VSX Vector Test for software Divide Double-Precision XX3-form (xvtdivdp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVTDIVSP, 0xfc0007f8, 0xf00002e8, 0x600001, // VSX Vector Test for software Divide Single-Precision XX3-form (xvtdivsp BF,XA,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XVTSQRTDP, 0xfc0007fc, 0xf00003a8, 0x7f0001, // VSX Vector Test for software Square Root Double-Precision XX2-form (xvtsqrtdp BF,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
 	{XVTSQRTSP, 0xfc0007fc, 0xf00002a8, 0x7f0001, // VSX Vector Test for software Square Root Single-Precision XX2-form (xvtsqrtsp BF,XB)
-		[5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
 	{XXLAND, 0xfc0007f8, 0xf0000410, 0x0, // VSX Vector Logical AND XX3-form (xxland XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLANDC, 0xfc0007f8, 0xf0000450, 0x0, // VSX Vector Logical AND with Complement XX3-form (xxlandc XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLNOR, 0xfc0007f8, 0xf0000510, 0x0, // VSX Vector Logical NOR XX3-form (xxlnor XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLOR, 0xfc0007f8, 0xf0000490, 0x0, // VSX Vector Logical OR XX3-form (xxlor XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXLXOR, 0xfc0007f8, 0xf00004d0, 0x0, // VSX Vector Logical XOR XX3-form (xxlxor XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXMRGHW, 0xfc0007f8, 0xf0000090, 0x0, // VSX Vector Merge High Word XX3-form (xxmrghw XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXMRGLW, 0xfc0007f8, 0xf0000190, 0x0, // VSX Vector Merge Low Word XX3-form (xxmrglw XT,XA,XB)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
 	{XXPERMDI, 0xfc0004f8, 0xf0000050, 0x0, // VSX Vector Permute Doubleword Immediate XX3-form (xxpermdi XT,XA,XB,DM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
 	{XXSEL, 0xfc000030, 0xf0000030, 0x0, // VSX Vector Select XX4-form (xxsel XT,XA,XB,XC)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_VecSReg_28_28_21_25}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_VecSReg_28_28_21_25}},
 	{XXSLDWI, 0xfc0004f8, 0xf0000010, 0x0, // VSX Vector Shift Left Double by Word Immediate XX3-form (xxsldwi XT,XA,XB,SHW)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
 	{XXSPLTW, 0xfc0007fc, 0xf0000290, 0x1c0000, // VSX Vector Splat Word XX2-form (xxspltw XT,XB,UIM)
-		[5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_14_15}},
+		[6]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_14_15}},
 	{CMPB, 0xfc0007fe, 0x7c0003f8, 0x1, // Compare Bytes X-form (cmpb RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{DADD, 0xfc0007ff, 0xec000004, 0x0, // DFP Add X-form (dadd FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DADDCC, 0xfc0007ff, 0xec000005, 0x0, // DFP Add X-form (dadd. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DADDQ, 0xfc0007ff, 0xfc000004, 0x0, // DFP Add Quad X-form (daddq FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DADDQCC, 0xfc0007ff, 0xfc000005, 0x0, // DFP Add Quad X-form (daddq. FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DCFFIXQ, 0xfc0007ff, 0xfc000644, 0x1f0000, // DFP Convert From Fixed Quad X-form (dcffixq FRTp,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCFFIXQCC, 0xfc0007ff, 0xfc000645, 0x1f0000, // DFP Convert From Fixed Quad X-form (dcffixq. FRTp,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCMPO, 0xfc0007fe, 0xec000104, 0x600001, // DFP Compare Ordered X-form (dcmpo BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DCMPOQ, 0xfc0007fe, 0xfc000104, 0x600001, // DFP Compare Ordered Quad X-form (dcmpoq BF,FRAp,FRBp)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DCMPU, 0xfc0007fe, 0xec000504, 0x600001, // DFP Compare Unordered X-form (dcmpu BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DCMPUQ, 0xfc0007fe, 0xfc000504, 0x600001, // DFP Compare Unordered Quad X-form (dcmpuq BF,FRAp,FRBp)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DCTDP, 0xfc0007ff, 0xec000204, 0x1f0000, // DFP Convert To DFP Long X-form (dctdp FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTDPCC, 0xfc0007ff, 0xec000205, 0x1f0000, // DFP Convert To DFP Long X-form (dctdp. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTFIX, 0xfc0007ff, 0xec000244, 0x1f0000, // DFP Convert To Fixed X-form (dctfix FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTFIXCC, 0xfc0007ff, 0xec000245, 0x1f0000, // DFP Convert To Fixed X-form (dctfix. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTFIXQ, 0xfc0007ff, 0xfc000244, 0x1f0000, // DFP Convert To Fixed Quad X-form (dctfixq FRT,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTFIXQCC, 0xfc0007ff, 0xfc000245, 0x1f0000, // DFP Convert To Fixed Quad X-form (dctfixq. FRT,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTQPQ, 0xfc0007ff, 0xfc000204, 0x1f0000, // DFP Convert To DFP Extended X-form (dctqpq FRTp,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DCTQPQCC, 0xfc0007ff, 0xfc000205, 0x1f0000, // DFP Convert To DFP Extended X-form (dctqpq. FRTp,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DDEDPD, 0xfc0007ff, 0xec000284, 0x70000, // DFP Decode DPD To BCD X-form (ddedpd SP,FRT,FRB)
-		[5]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DDEDPDCC, 0xfc0007ff, 0xec000285, 0x70000, // DFP Decode DPD To BCD X-form (ddedpd. SP,FRT,FRB)
-		[5]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DDEDPDQ, 0xfc0007ff, 0xfc000284, 0x70000, // DFP Decode DPD To BCD Quad X-form (ddedpdq SP,FRTp,FRBp)
-		[5]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DDEDPDQCC, 0xfc0007ff, 0xfc000285, 0x70000, // DFP Decode DPD To BCD Quad X-form (ddedpdq. SP,FRTp,FRBp)
-		[5]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_12, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DDIV, 0xfc0007ff, 0xec000444, 0x0, // DFP Divide X-form (ddiv FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DDIVCC, 0xfc0007ff, 0xec000445, 0x0, // DFP Divide X-form (ddiv. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DDIVQ, 0xfc0007ff, 0xfc000444, 0x0, // DFP Divide Quad X-form (ddivq FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DDIVQCC, 0xfc0007ff, 0xfc000445, 0x0, // DFP Divide Quad X-form (ddivq. FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DENBCD, 0xfc0007ff, 0xec000684, 0xf0000, // DFP Encode BCD To DPD X-form (denbcd S,FRT,FRB)
-		[5]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DENBCDCC, 0xfc0007ff, 0xec000685, 0xf0000, // DFP Encode BCD To DPD X-form (denbcd. S,FRT,FRB)
-		[5]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DENBCDQ, 0xfc0007ff, 0xfc000684, 0xf0000, // DFP Encode BCD To DPD Quad X-form (denbcdq S,FRTp,FRBp)
-		[5]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DENBCDQCC, 0xfc0007ff, 0xfc000685, 0xf0000, // DFP Encode BCD To DPD Quad X-form (denbcdq. S,FRTp,FRBp)
-		[5]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_ImmUnsigned_11_11, ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DIEX, 0xfc0007ff, 0xec0006c4, 0x0, // DFP Insert Biased Exponent X-form (diex FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DIEXCC, 0xfc0007ff, 0xec0006c5, 0x0, // DFP Insert Biased Exponent X-form (diex. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DIEXQCC, 0xfc0007ff, 0xfc0006c5, 0x0, // DFP Insert Biased Exponent Quad X-form (diexq. FRTp,FRA,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DIEXQ, 0xfc0007fe, 0xfc0006c4, 0x0, // DFP Insert Biased Exponent Quad X-form (diexq FRTp,FRA,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DMUL, 0xfc0007ff, 0xec000044, 0x0, // DFP Multiply X-form (dmul FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DMULCC, 0xfc0007ff, 0xec000045, 0x0, // DFP Multiply X-form (dmul. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DMULQ, 0xfc0007ff, 0xfc000044, 0x0, // DFP Multiply Quad X-form (dmulq FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DMULQCC, 0xfc0007ff, 0xfc000045, 0x0, // DFP Multiply Quad X-form (dmulq. FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DQUA, 0xfc0001ff, 0xec000006, 0x0, // DFP Quantize Z23-form (dqua FRT,FRA,FRB,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUACC, 0xfc0001ff, 0xec000007, 0x0, // DFP Quantize Z23-form (dqua. FRT,FRA,FRB,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAI, 0xfc0001ff, 0xec000086, 0x0, // DFP Quantize Immediate Z23-form (dquai TE,FRT,FRB,RMC)
-		[5]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAICC, 0xfc0001ff, 0xec000087, 0x0, // DFP Quantize Immediate Z23-form (dquai. TE,FRT,FRB,RMC)
-		[5]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAIQ, 0xfc0001ff, 0xfc000086, 0x0, // DFP Quantize Immediate Quad Z23-form (dquaiq TE,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAIQCC, 0xfc0001ff, 0xfc000087, 0x0, // DFP Quantize Immediate Quad Z23-form (dquaiq. TE,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmSigned_11_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAQ, 0xfc0001ff, 0xfc000006, 0x0, // DFP Quantize Quad Z23-form (dquaq FRTp,FRAp,FRBp,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DQUAQCC, 0xfc0001ff, 0xfc000007, 0x0, // DFP Quantize Quad Z23-form (dquaq. FRTp,FRAp,FRBp,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRDPQ, 0xfc0007ff, 0xfc000604, 0x1f0000, // DFP Round To DFP Long X-form (drdpq FRTp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DRDPQCC, 0xfc0007ff, 0xfc000605, 0x1f0000, // DFP Round To DFP Long X-form (drdpq. FRTp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DRINTN, 0xfc0001ff, 0xec0001c6, 0x1e0000, // DFP Round To FP Integer Without Inexact Z23-form (drintn R,FRT,FRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTNCC, 0xfc0001ff, 0xec0001c7, 0x1e0000, // DFP Round To FP Integer Without Inexact Z23-form (drintn. R,FRT,FRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTNQ, 0xfc0001ff, 0xfc0001c6, 0x1e0000, // DFP Round To FP Integer Without Inexact Quad Z23-form (drintnq R,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTNQCC, 0xfc0001ff, 0xfc0001c7, 0x1e0000, // DFP Round To FP Integer Without Inexact Quad Z23-form (drintnq. R,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTX, 0xfc0001ff, 0xec0000c6, 0x1e0000, // DFP Round To FP Integer With Inexact Z23-form (drintx R,FRT,FRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTXCC, 0xfc0001ff, 0xec0000c7, 0x1e0000, // DFP Round To FP Integer With Inexact Z23-form (drintx. R,FRT,FRB,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTXQ, 0xfc0001ff, 0xfc0000c6, 0x1e0000, // DFP Round To FP Integer With Inexact Quad Z23-form (drintxq R,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRINTXQCC, 0xfc0001ff, 0xfc0000c7, 0x1e0000, // DFP Round To FP Integer With Inexact Quad Z23-form (drintxq. R,FRTp,FRBp,RMC)
-		[5]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_ImmUnsigned_15_15, ap_FPReg_6_10, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRRND, 0xfc0001ff, 0xec000046, 0x0, // DFP Reround Z23-form (drrnd FRT,FRA,FRB,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRRNDCC, 0xfc0001ff, 0xec000047, 0x0, // DFP Reround Z23-form (drrnd. FRT,FRA,FRB,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRRNDQ, 0xfc0001ff, 0xfc000046, 0x0, // DFP Reround Quad Z23-form (drrndq FRTp,FRA,FRBp,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRRNDQCC, 0xfc0001ff, 0xfc000047, 0x0, // DFP Reround Quad Z23-form (drrndq. FRTp,FRA,FRBp,RMC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20, ap_ImmUnsigned_21_22}},
 	{DRSP, 0xfc0007ff, 0xec000604, 0x1f0000, // DFP Round To DFP Short X-form (drsp FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DRSPCC, 0xfc0007ff, 0xec000605, 0x1f0000, // DFP Round To DFP Short X-form (drsp. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DSCLI, 0xfc0003ff, 0xec000084, 0x0, // DFP Shift Significand Left Immediate Z22-form (dscli FRT,FRA,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCLICC, 0xfc0003ff, 0xec000085, 0x0, // DFP Shift Significand Left Immediate Z22-form (dscli. FRT,FRA,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCLIQ, 0xfc0003ff, 0xfc000084, 0x0, // DFP Shift Significand Left Immediate Quad Z22-form (dscliq FRTp,FRAp,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCLIQCC, 0xfc0003ff, 0xfc000085, 0x0, // DFP Shift Significand Left Immediate Quad Z22-form (dscliq. FRTp,FRAp,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCRI, 0xfc0003ff, 0xec0000c4, 0x0, // DFP Shift Significand Right Immediate Z22-form (dscri FRT,FRA,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCRICC, 0xfc0003ff, 0xec0000c5, 0x0, // DFP Shift Significand Right Immediate Z22-form (dscri. FRT,FRA,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCRIQ, 0xfc0003ff, 0xfc0000c4, 0x0, // DFP Shift Significand Right Immediate Quad Z22-form (dscriq FRTp,FRAp,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSCRIQCC, 0xfc0003ff, 0xfc0000c5, 0x0, // DFP Shift Significand Right Immediate Quad Z22-form (dscriq. FRTp,FRAp,SH)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DSUB, 0xfc0007ff, 0xec000404, 0x0, // DFP Subtract X-form (dsub FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DSUBCC, 0xfc0007ff, 0xec000405, 0x0, // DFP Subtract X-form (dsub. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DSUBQ, 0xfc0007ff, 0xfc000404, 0x0, // DFP Subtract Quad X-form (dsubq FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DSUBQCC, 0xfc0007ff, 0xfc000405, 0x0, // DFP Subtract Quad X-form (dsubq. FRTp,FRAp,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DTSTDC, 0xfc0003fe, 0xec000184, 0x600001, // DFP Test Data Class Z22-form (dtstdc BF,FRA,DCM)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DTSTDCQ, 0xfc0003fe, 0xfc000184, 0x600001, // DFP Test Data Class Quad Z22-form (dtstdcq BF,FRAp,DCM)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DTSTDG, 0xfc0003fe, 0xec0001c4, 0x600001, // DFP Test Data Group Z22-form (dtstdg BF,FRA,DGM)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DTSTDGQ, 0xfc0003fe, 0xfc0001c4, 0x600001, // DFP Test Data Group Quad Z22-form (dtstdgq BF,FRAp,DGM)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
 	{DTSTEX, 0xfc0007fe, 0xec000144, 0x600001, // DFP Test Exponent X-form (dtstex BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DTSTEXQ, 0xfc0007fe, 0xfc000144, 0x600001, // DFP Test Exponent Quad X-form (dtstexq BF,FRAp,FRBp)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DTSTSF, 0xfc0007fe, 0xec000544, 0x600001, // DFP Test Significance X-form (dtstsf BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DTSTSFQ, 0xfc0007fe, 0xfc000544, 0x600001, // DFP Test Significance Quad X-form (dtstsfq BF,FRA,FRBp)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{DXEX, 0xfc0007ff, 0xec0002c4, 0x1f0000, // DFP Extract Biased Exponent X-form (dxex FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DXEXCC, 0xfc0007ff, 0xec0002c5, 0x1f0000, // DFP Extract Biased Exponent X-form (dxex. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DXEXQ, 0xfc0007ff, 0xfc0002c4, 0x1f0000, // DFP Extract Biased Exponent Quad X-form (dxexq FRT,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{DXEXQCC, 0xfc0007ff, 0xfc0002c5, 0x1f0000, // DFP Extract Biased Exponent Quad X-form (dxexq. FRT,FRBp)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCPSGN, 0xfc0007ff, 0xfc000010, 0x0, // Floating Copy Sign X-form (fcpsgn FRT, FRA, FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FCPSGNCC, 0xfc0007ff, 0xfc000011, 0x0, // Floating Copy Sign X-form (fcpsgn. FRT, FRA, FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{LBZCIX, 0xfc0007fe, 0x7c0006aa, 0x1, // Load Byte & Zero Caching Inhibited Indexed X-form (lbzcix RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LDCIX, 0xfc0007fe, 0x7c0006ea, 0x1, // Load Doubleword Caching Inhibited Indexed X-form (ldcix RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFDP, 0xfc000003, 0xe4000000, 0x0, // Load Floating-Point Double Pair DS-form (lfdp FRTp,DS(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LFDPX, 0xfc0007fe, 0x7c00062e, 0x1, // Load Floating-Point Double Pair Indexed X-form (lfdpx FRTp,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFIWAX, 0xfc0007fe, 0x7c0006ae, 0x1, // Load Floating-Point as Integer Word Algebraic Indexed X-form (lfiwax FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHZCIX, 0xfc0007fe, 0x7c00066a, 0x1, // Load Halfword & Zero Caching Inhibited Indexed X-form (lhzcix RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWZCIX, 0xfc0007fe, 0x7c00062a, 0x1, // Load Word & Zero Caching Inhibited Indexed X-form (lwzcix RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{PRTYD, 0xfc0007fe, 0x7c000174, 0xf801, // Parity Doubleword X-form (prtyd RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{PRTYW, 0xfc0007fe, 0x7c000134, 0xf801, // Parity Word X-form (prtyw RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{SLBFEECC, 0xfc0007ff, 0x7c0007a7, 0x1f0000, // SLB Find Entry ESID X-form (slbfee. RT,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_16_20}},
 	{STBCIX, 0xfc0007fe, 0x7c0007aa, 0x1, // Store Byte Caching Inhibited Indexed X-form (stbcix RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STDCIX, 0xfc0007fe, 0x7c0007ea, 0x1, // Store Doubleword Caching Inhibited Indexed X-form (stdcix RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFDP, 0xfc000003, 0xf4000000, 0x0, // Store Floating-Point Double Pair DS-form (stfdp FRSp,DS(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{STFDPX, 0xfc0007fe, 0x7c00072e, 0x1, // Store Floating-Point Double Pair Indexed X-form (stfdpx FRSp,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STHCIX, 0xfc0007fe, 0x7c00076a, 0x1, // Store Halfword Caching Inhibited Indexed X-form (sthcix RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STWCIX, 0xfc0007fe, 0x7c00072a, 0x1, // Store Word Caching Inhibited Indexed X-form (stwcix RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ISEL, 0xfc00003e, 0x7c00001e, 0x1, // Integer Select A-form (isel RT,RA,RB,BC)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_CondRegBit_21_25}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_CondRegBit_21_25}},
 	{LVEBX, 0xfc0007fe, 0x7c00000e, 0x1, // Load Vector Element Byte Indexed X-form (lvebx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVEHX, 0xfc0007fe, 0x7c00004e, 0x1, // Load Vector Element Halfword Indexed X-form (lvehx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVEWX, 0xfc0007fe, 0x7c00008e, 0x1, // Load Vector Element Word Indexed X-form (lvewx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVSL, 0xfc0007fe, 0x7c00000c, 0x1, // Load Vector for Shift Left Indexed X-form (lvsl VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVSR, 0xfc0007fe, 0x7c00004c, 0x1, // Load Vector for Shift Right Indexed X-form (lvsr VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVX, 0xfc0007fe, 0x7c0000ce, 0x1, // Load Vector Indexed X-form (lvx VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LVXL, 0xfc0007fe, 0x7c0002ce, 0x1, // Load Vector Indexed Last X-form (lvxl VRT,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MFVSCR, 0xfc0007ff, 0x10000604, 0x1ff800, // Move From Vector Status and Control Register VX-form (mfvscr VRT)
-		[5]*argField{ap_VecReg_6_10}},
+		[6]*argField{ap_VecReg_6_10}},
 	{MTVSCR, 0xfc0007ff, 0x10000644, 0x3ff0000, // Move To Vector Status and Control Register VX-form (mtvscr VRB)
-		[5]*argField{ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_16_20}},
 	{STVEBX, 0xfc0007fe, 0x7c00010e, 0x1, // Store Vector Element Byte Indexed X-form (stvebx VRS,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STVEHX, 0xfc0007fe, 0x7c00014e, 0x1, // Store Vector Element Halfword Indexed X-form (stvehx VRS,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STVEWX, 0xfc0007fe, 0x7c00018e, 0x1, // Store Vector Element Word Indexed X-form (stvewx VRS,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STVX, 0xfc0007fe, 0x7c0001ce, 0x1, // Store Vector Indexed X-form (stvx VRS,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STVXL, 0xfc0007fe, 0x7c0003ce, 0x1, // Store Vector Indexed Last X-form (stvxl VRS,RA,RB)
-		[5]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{TLBIEL, 0xfc0007fe, 0x7c000224, 0x100001, // TLB Invalidate Entry Local X-form (tlbiel RB,RS,RIC,PRS,R)
-		[5]*argField{ap_Reg_16_20, ap_Reg_6_10, ap_ImmUnsigned_12_13, ap_ImmUnsigned_14_14, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_16_20, ap_Reg_6_10, ap_ImmUnsigned_12_13, ap_ImmUnsigned_14_14, ap_ImmUnsigned_15_15}},
 	{VADDCUW, 0xfc0007ff, 0x10000180, 0x0, // Vector Add & write Carry Unsigned Word VX-form (vaddcuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDFP, 0xfc0007ff, 0x1000000a, 0x0, // Vector Add Floating-Point VX-form (vaddfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDSBS, 0xfc0007ff, 0x10000300, 0x0, // Vector Add Signed Byte Saturate VX-form (vaddsbs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDSHS, 0xfc0007ff, 0x10000340, 0x0, // Vector Add Signed Halfword Saturate VX-form (vaddshs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDSWS, 0xfc0007ff, 0x10000380, 0x0, // Vector Add Signed Word Saturate VX-form (vaddsws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUBM, 0xfc0007ff, 0x10000000, 0x0, // Vector Add Unsigned Byte Modulo VX-form (vaddubm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUBS, 0xfc0007ff, 0x10000200, 0x0, // Vector Add Unsigned Byte Saturate VX-form (vaddubs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUHM, 0xfc0007ff, 0x10000040, 0x0, // Vector Add Unsigned Halfword Modulo VX-form (vadduhm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUHS, 0xfc0007ff, 0x10000240, 0x0, // Vector Add Unsigned Halfword Saturate VX-form (vadduhs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUWM, 0xfc0007ff, 0x10000080, 0x0, // Vector Add Unsigned Word Modulo VX-form (vadduwm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VADDUWS, 0xfc0007ff, 0x10000280, 0x0, // Vector Add Unsigned Word Saturate VX-form (vadduws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAND, 0xfc0007ff, 0x10000404, 0x0, // Vector Logical AND VX-form (vand VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VANDC, 0xfc0007ff, 0x10000444, 0x0, // Vector Logical AND with Complement VX-form (vandc VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGSB, 0xfc0007ff, 0x10000502, 0x0, // Vector Average Signed Byte VX-form (vavgsb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGSH, 0xfc0007ff, 0x10000542, 0x0, // Vector Average Signed Halfword VX-form (vavgsh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGSW, 0xfc0007ff, 0x10000582, 0x0, // Vector Average Signed Word VX-form (vavgsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGUB, 0xfc0007ff, 0x10000402, 0x0, // Vector Average Unsigned Byte VX-form (vavgub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGUH, 0xfc0007ff, 0x10000442, 0x0, // Vector Average Unsigned Halfword VX-form (vavguh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VAVGUW, 0xfc0007ff, 0x10000482, 0x0, // Vector Average Unsigned Word VX-form (vavguw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCFSX, 0xfc0007ff, 0x1000034a, 0x0, // Vector Convert with round to nearest From Signed Word to floating-point format VX-form (vcfsx VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{VCFUX, 0xfc0007ff, 0x1000030a, 0x0, // Vector Convert with round to nearest From Unsigned Word to floating-point format VX-form (vcfux VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{VCMPBFP, 0xfc0007ff, 0x100003c6, 0x0, // Vector Compare Bounds Floating-Point VC-form (vcmpbfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPBFPCC, 0xfc0007ff, 0x100007c6, 0x0, // Vector Compare Bounds Floating-Point VC-form (vcmpbfp. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQFP, 0xfc0007ff, 0x100000c6, 0x0, // Vector Compare Equal Floating-Point VC-form (vcmpeqfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQFPCC, 0xfc0007ff, 0x100004c6, 0x0, // Vector Compare Equal Floating-Point VC-form (vcmpeqfp. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUB, 0xfc0007ff, 0x10000006, 0x0, // Vector Compare Equal Unsigned Byte VC-form (vcmpequb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUBCC, 0xfc0007ff, 0x10000406, 0x0, // Vector Compare Equal Unsigned Byte VC-form (vcmpequb. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUH, 0xfc0007ff, 0x10000046, 0x0, // Vector Compare Equal Unsigned Halfword VC-form (vcmpequh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUHCC, 0xfc0007ff, 0x10000446, 0x0, // Vector Compare Equal Unsigned Halfword VC-form (vcmpequh. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUW, 0xfc0007ff, 0x10000086, 0x0, // Vector Compare Equal Unsigned Word VC-form (vcmpequw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPEQUWCC, 0xfc0007ff, 0x10000486, 0x0, // Vector Compare Equal Unsigned Word VC-form (vcmpequw. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGEFP, 0xfc0007ff, 0x100001c6, 0x0, // Vector Compare Greater Than or Equal Floating-Point VC-form (vcmpgefp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGEFPCC, 0xfc0007ff, 0x100005c6, 0x0, // Vector Compare Greater Than or Equal Floating-Point VC-form (vcmpgefp. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTFP, 0xfc0007ff, 0x100002c6, 0x0, // Vector Compare Greater Than Floating-Point VC-form (vcmpgtfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTFPCC, 0xfc0007ff, 0x100006c6, 0x0, // Vector Compare Greater Than Floating-Point VC-form (vcmpgtfp. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSB, 0xfc0007ff, 0x10000306, 0x0, // Vector Compare Greater Than Signed Byte VC-form (vcmpgtsb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSBCC, 0xfc0007ff, 0x10000706, 0x0, // Vector Compare Greater Than Signed Byte VC-form (vcmpgtsb. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSH, 0xfc0007ff, 0x10000346, 0x0, // Vector Compare Greater Than Signed Halfword VC-form (vcmpgtsh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSHCC, 0xfc0007ff, 0x10000746, 0x0, // Vector Compare Greater Than Signed Halfword VC-form (vcmpgtsh. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSW, 0xfc0007ff, 0x10000386, 0x0, // Vector Compare Greater Than Signed Word VC-form (vcmpgtsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTSWCC, 0xfc0007ff, 0x10000786, 0x0, // Vector Compare Greater Than Signed Word VC-form (vcmpgtsw. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUB, 0xfc0007ff, 0x10000206, 0x0, // Vector Compare Greater Than Unsigned Byte VC-form (vcmpgtub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUBCC, 0xfc0007ff, 0x10000606, 0x0, // Vector Compare Greater Than Unsigned Byte VC-form (vcmpgtub. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUH, 0xfc0007ff, 0x10000246, 0x0, // Vector Compare Greater Than Unsigned Halfword VC-form (vcmpgtuh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUHCC, 0xfc0007ff, 0x10000646, 0x0, // Vector Compare Greater Than Unsigned Halfword VC-form (vcmpgtuh. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUW, 0xfc0007ff, 0x10000286, 0x0, // Vector Compare Greater Than Unsigned Word VC-form (vcmpgtuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCMPGTUWCC, 0xfc0007ff, 0x10000686, 0x0, // Vector Compare Greater Than Unsigned Word VC-form (vcmpgtuw. VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VCTSXS, 0xfc0007ff, 0x100003ca, 0x0, // Vector Convert with round to zero from floating-point To Signed Word format Saturate VX-form (vctsxs VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{VCTUXS, 0xfc0007ff, 0x1000038a, 0x0, // Vector Convert with round to zero from floating-point To Unsigned Word format Saturate VX-form (vctuxs VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_11_15}},
 	{VEXPTEFP, 0xfc0007ff, 0x1000018a, 0x1f0000, // Vector 2 Raised to the Exponent Estimate Floating-Point VX-form (vexptefp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VLOGEFP, 0xfc0007ff, 0x100001ca, 0x1f0000, // Vector Log Base 2 Estimate Floating-Point VX-form (vlogefp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VMADDFP, 0xfc00003f, 0x1000002e, 0x0, // Vector Multiply-Add Floating-Point VA-form (vmaddfp VRT,VRA,VRC,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_21_25, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_21_25, ap_VecReg_16_20}},
 	{VMAXFP, 0xfc0007ff, 0x1000040a, 0x0, // Vector Maximum Floating-Point VX-form (vmaxfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXSB, 0xfc0007ff, 0x10000102, 0x0, // Vector Maximum Signed Byte VX-form (vmaxsb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXSH, 0xfc0007ff, 0x10000142, 0x0, // Vector Maximum Signed Halfword VX-form (vmaxsh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXSW, 0xfc0007ff, 0x10000182, 0x0, // Vector Maximum Signed Word VX-form (vmaxsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXUB, 0xfc0007ff, 0x10000002, 0x0, // Vector Maximum Unsigned Byte VX-form (vmaxub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXUH, 0xfc0007ff, 0x10000042, 0x0, // Vector Maximum Unsigned Halfword VX-form (vmaxuh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMAXUW, 0xfc0007ff, 0x10000082, 0x0, // Vector Maximum Unsigned Word VX-form (vmaxuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMHADDSHS, 0xfc00003f, 0x10000020, 0x0, // Vector Multiply-High-Add Signed Halfword Saturate VA-form (vmhaddshs VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMHRADDSHS, 0xfc00003f, 0x10000021, 0x0, // Vector Multiply-High-Round-Add Signed Halfword Saturate VA-form (vmhraddshs VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMINFP, 0xfc0007ff, 0x1000044a, 0x0, // Vector Minimum Floating-Point VX-form (vminfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINSB, 0xfc0007ff, 0x10000302, 0x0, // Vector Minimum Signed Byte VX-form (vminsb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINSH, 0xfc0007ff, 0x10000342, 0x0, // Vector Minimum Signed Halfword VX-form (vminsh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINSW, 0xfc0007ff, 0x10000382, 0x0, // Vector Minimum Signed Word VX-form (vminsw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINUB, 0xfc0007ff, 0x10000202, 0x0, // Vector Minimum Unsigned Byte VX-form (vminub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINUH, 0xfc0007ff, 0x10000242, 0x0, // Vector Minimum Unsigned Halfword VX-form (vminuh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMINUW, 0xfc0007ff, 0x10000282, 0x0, // Vector Minimum Unsigned Word VX-form (vminuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMLADDUHM, 0xfc00003f, 0x10000022, 0x0, // Vector Multiply-Low-Add Unsigned Halfword Modulo VA-form (vmladduhm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMRGHB, 0xfc0007ff, 0x1000000c, 0x0, // Vector Merge High Byte VX-form (vmrghb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGHH, 0xfc0007ff, 0x1000004c, 0x0, // Vector Merge High Halfword VX-form (vmrghh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGHW, 0xfc0007ff, 0x1000008c, 0x0, // Vector Merge High Word VX-form (vmrghw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGLB, 0xfc0007ff, 0x1000010c, 0x0, // Vector Merge Low Byte VX-form (vmrglb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGLH, 0xfc0007ff, 0x1000014c, 0x0, // Vector Merge Low Halfword VX-form (vmrglh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMRGLW, 0xfc0007ff, 0x1000018c, 0x0, // Vector Merge Low Word VX-form (vmrglw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMSUMMBM, 0xfc00003f, 0x10000025, 0x0, // Vector Multiply-Sum Mixed Byte Modulo VA-form (vmsummbm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMSUMSHM, 0xfc00003f, 0x10000028, 0x0, // Vector Multiply-Sum Signed Halfword Modulo VA-form (vmsumshm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMSUMSHS, 0xfc00003f, 0x10000029, 0x0, // Vector Multiply-Sum Signed Halfword Saturate VA-form (vmsumshs VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMSUMUBM, 0xfc00003f, 0x10000024, 0x0, // Vector Multiply-Sum Unsigned Byte Modulo VA-form (vmsumubm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMSUMUHM, 0xfc00003f, 0x10000026, 0x0, // Vector Multiply-Sum Unsigned Halfword Modulo VA-form (vmsumuhm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMSUMUHS, 0xfc00003f, 0x10000027, 0x0, // Vector Multiply-Sum Unsigned Halfword Saturate VA-form (vmsumuhs VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VMULESB, 0xfc0007ff, 0x10000308, 0x0, // Vector Multiply Even Signed Byte VX-form (vmulesb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULESH, 0xfc0007ff, 0x10000348, 0x0, // Vector Multiply Even Signed Halfword VX-form (vmulesh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULEUB, 0xfc0007ff, 0x10000208, 0x0, // Vector Multiply Even Unsigned Byte VX-form (vmuleub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULEUH, 0xfc0007ff, 0x10000248, 0x0, // Vector Multiply Even Unsigned Halfword VX-form (vmuleuh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOSB, 0xfc0007ff, 0x10000108, 0x0, // Vector Multiply Odd Signed Byte VX-form (vmulosb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOSH, 0xfc0007ff, 0x10000148, 0x0, // Vector Multiply Odd Signed Halfword VX-form (vmulosh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOUB, 0xfc0007ff, 0x10000008, 0x0, // Vector Multiply Odd Unsigned Byte VX-form (vmuloub VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VMULOUH, 0xfc0007ff, 0x10000048, 0x0, // Vector Multiply Odd Unsigned Halfword VX-form (vmulouh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VNMSUBFP, 0xfc00003f, 0x1000002f, 0x0, // Vector Negative Multiply-Subtract Floating-Point VA-form (vnmsubfp VRT,VRA,VRC,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_21_25, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_21_25, ap_VecReg_16_20}},
 	{VNOR, 0xfc0007ff, 0x10000504, 0x0, // Vector Logical NOR VX-form (vnor VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VOR, 0xfc0007ff, 0x10000484, 0x0, // Vector Logical OR VX-form (vor VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPERM, 0xfc00003f, 0x1000002b, 0x0, // Vector Permute VA-form (vperm VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VPKPX, 0xfc0007ff, 0x1000030e, 0x0, // Vector Pack Pixel VX-form (vpkpx VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKSHSS, 0xfc0007ff, 0x1000018e, 0x0, // Vector Pack Signed Halfword Signed Saturate VX-form (vpkshss VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKSHUS, 0xfc0007ff, 0x1000010e, 0x0, // Vector Pack Signed Halfword Unsigned Saturate VX-form (vpkshus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKSWSS, 0xfc0007ff, 0x100001ce, 0x0, // Vector Pack Signed Word Signed Saturate VX-form (vpkswss VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKSWUS, 0xfc0007ff, 0x1000014e, 0x0, // Vector Pack Signed Word Unsigned Saturate VX-form (vpkswus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUHUM, 0xfc0007ff, 0x1000000e, 0x0, // Vector Pack Unsigned Halfword Unsigned Modulo VX-form (vpkuhum VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUHUS, 0xfc0007ff, 0x1000008e, 0x0, // Vector Pack Unsigned Halfword Unsigned Saturate VX-form (vpkuhus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUWUM, 0xfc0007ff, 0x1000004e, 0x0, // Vector Pack Unsigned Word Unsigned Modulo VX-form (vpkuwum VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VPKUWUS, 0xfc0007ff, 0x100000ce, 0x0, // Vector Pack Unsigned Word Unsigned Saturate VX-form (vpkuwus VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VREFP, 0xfc0007ff, 0x1000010a, 0x1f0000, // Vector Reciprocal Estimate Floating-Point VX-form (vrefp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRFIM, 0xfc0007ff, 0x100002ca, 0x1f0000, // Vector Round to Floating-Point Integer toward -Infinity VX-form (vrfim VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRFIN, 0xfc0007ff, 0x1000020a, 0x1f0000, // Vector Round to Floating-Point Integer Nearest VX-form (vrfin VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRFIP, 0xfc0007ff, 0x1000028a, 0x1f0000, // Vector Round to Floating-Point Integer toward +Infinity VX-form (vrfip VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRFIZ, 0xfc0007ff, 0x1000024a, 0x1f0000, // Vector Round to Floating-Point Integer toward Zero VX-form (vrfiz VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VRLB, 0xfc0007ff, 0x10000004, 0x0, // Vector Rotate Left Byte VX-form (vrlb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLH, 0xfc0007ff, 0x10000044, 0x0, // Vector Rotate Left Halfword VX-form (vrlh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRLW, 0xfc0007ff, 0x10000084, 0x0, // Vector Rotate Left Word VX-form (vrlw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VRSQRTEFP, 0xfc0007ff, 0x1000014a, 0x1f0000, // Vector Reciprocal Square Root Estimate Floating-Point VX-form (vrsqrtefp VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VSEL, 0xfc00003f, 0x1000002a, 0x0, // Vector Select VA-form (vsel VRT,VRA,VRB,VRC)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_VecReg_21_25}},
 	{VSL, 0xfc0007ff, 0x100001c4, 0x0, // Vector Shift Left VX-form (vsl VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLB, 0xfc0007ff, 0x10000104, 0x0, // Vector Shift Left Byte VX-form (vslb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLDOI, 0xfc00003f, 0x1000002c, 0x400, // Vector Shift Left Double by Octet Immediate VA-form (vsldoi VRT,VRA,VRB,SHB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_25}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20, ap_ImmUnsigned_22_25}},
 	{VSLH, 0xfc0007ff, 0x10000144, 0x0, // Vector Shift Left Halfword VX-form (vslh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLO, 0xfc0007ff, 0x1000040c, 0x0, // Vector Shift Left by Octet VX-form (vslo VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSLW, 0xfc0007ff, 0x10000184, 0x0, // Vector Shift Left Word VX-form (vslw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSPLTB, 0xfc0007ff, 0x1000020c, 0x100000, // Vector Splat Byte VX-form (vspltb VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_12_15}},
 	{VSPLTH, 0xfc0007ff, 0x1000024c, 0x180000, // Vector Splat Halfword VX-form (vsplth VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_13_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_13_15}},
 	{VSPLTISB, 0xfc0007ff, 0x1000030c, 0xf800, // Vector Splat Immediate Signed Byte VX-form (vspltisb VRT,SIM)
-		[5]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
 	{VSPLTISH, 0xfc0007ff, 0x1000034c, 0xf800, // Vector Splat Immediate Signed Halfword VX-form (vspltish VRT,SIM)
-		[5]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
 	{VSPLTISW, 0xfc0007ff, 0x1000038c, 0xf800, // Vector Splat Immediate Signed Word VX-form (vspltisw VRT,SIM)
-		[5]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
+		[6]*argField{ap_VecReg_6_10, ap_ImmSigned_11_15}},
 	{VSPLTW, 0xfc0007ff, 0x1000028c, 0x1c0000, // Vector Splat Word VX-form (vspltw VRT,VRB,UIM)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_14_15}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20, ap_ImmUnsigned_14_15}},
 	{VSR, 0xfc0007ff, 0x100002c4, 0x0, // Vector Shift Right VX-form (vsr VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRAB, 0xfc0007ff, 0x10000304, 0x0, // Vector Shift Right Algebraic Byte VX-form (vsrab VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRAH, 0xfc0007ff, 0x10000344, 0x0, // Vector Shift Right Algebraic Halfword VX-form (vsrah VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRAW, 0xfc0007ff, 0x10000384, 0x0, // Vector Shift Right Algebraic Word VX-form (vsraw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRB, 0xfc0007ff, 0x10000204, 0x0, // Vector Shift Right Byte VX-form (vsrb VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRH, 0xfc0007ff, 0x10000244, 0x0, // Vector Shift Right Halfword VX-form (vsrh VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRO, 0xfc0007ff, 0x1000044c, 0x0, // Vector Shift Right by Octet VX-form (vsro VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSRW, 0xfc0007ff, 0x10000284, 0x0, // Vector Shift Right Word VX-form (vsrw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBCUW, 0xfc0007ff, 0x10000580, 0x0, // Vector Subtract & Write Carry-out Unsigned Word VX-form (vsubcuw VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBFP, 0xfc0007ff, 0x1000004a, 0x0, // Vector Subtract Floating-Point VX-form (vsubfp VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBSBS, 0xfc0007ff, 0x10000700, 0x0, // Vector Subtract Signed Byte Saturate VX-form (vsubsbs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBSHS, 0xfc0007ff, 0x10000740, 0x0, // Vector Subtract Signed Halfword Saturate VX-form (vsubshs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBSWS, 0xfc0007ff, 0x10000780, 0x0, // Vector Subtract Signed Word Saturate VX-form (vsubsws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUBM, 0xfc0007ff, 0x10000400, 0x0, // Vector Subtract Unsigned Byte Modulo VX-form (vsububm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUBS, 0xfc0007ff, 0x10000600, 0x0, // Vector Subtract Unsigned Byte Saturate VX-form (vsububs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUHM, 0xfc0007ff, 0x10000440, 0x0, // Vector Subtract Unsigned Halfword Modulo VX-form (vsubuhm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUHS, 0xfc0007ff, 0x10000640, 0x0, // Vector Subtract Unsigned Halfword Saturate VX-form (vsubuhs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUWM, 0xfc0007ff, 0x10000480, 0x0, // Vector Subtract Unsigned Word Modulo VX-form (vsubuwm VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUBUWS, 0xfc0007ff, 0x10000680, 0x0, // Vector Subtract Unsigned Word Saturate VX-form (vsubuws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUM2SWS, 0xfc0007ff, 0x10000688, 0x0, // Vector Sum across Half Signed Word Saturate VX-form (vsum2sws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUM4SBS, 0xfc0007ff, 0x10000708, 0x0, // Vector Sum across Quarter Signed Byte Saturate VX-form (vsum4sbs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUM4SHS, 0xfc0007ff, 0x10000648, 0x0, // Vector Sum across Quarter Signed Halfword Saturate VX-form (vsum4shs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUM4UBS, 0xfc0007ff, 0x10000608, 0x0, // Vector Sum across Quarter Unsigned Byte Saturate VX-form (vsum4ubs VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VSUMSWS, 0xfc0007ff, 0x10000788, 0x0, // Vector Sum across Signed Word Saturate VX-form (vsumsws VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{VUPKHPX, 0xfc0007ff, 0x1000034e, 0x1f0000, // Vector Unpack High Pixel VX-form (vupkhpx VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKHSB, 0xfc0007ff, 0x1000020e, 0x1f0000, // Vector Unpack High Signed Byte VX-form (vupkhsb VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKHSH, 0xfc0007ff, 0x1000024e, 0x1f0000, // Vector Unpack High Signed Halfword VX-form (vupkhsh VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKLPX, 0xfc0007ff, 0x100003ce, 0x1f0000, // Vector Unpack Low Pixel VX-form (vupklpx VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKLSB, 0xfc0007ff, 0x1000028e, 0x1f0000, // Vector Unpack Low Signed Byte VX-form (vupklsb VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VUPKLSH, 0xfc0007ff, 0x100002ce, 0x1f0000, // Vector Unpack Low Signed Halfword VX-form (vupklsh VRT,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_16_20}},
 	{VXOR, 0xfc0007ff, 0x100004c4, 0x0, // Vector Logical XOR VX-form (vxor VRT,VRA,VRB)
-		[5]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
+		[6]*argField{ap_VecReg_6_10, ap_VecReg_11_15, ap_VecReg_16_20}},
 	{FRE, 0xfc00003f, 0xfc000030, 0x1f07c0, // Floating Reciprocal Estimate A-form (fre FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRECC, 0xfc00003f, 0xfc000031, 0x1f07c0, // Floating Reciprocal Estimate A-form (fre. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIM, 0xfc0007ff, 0xfc0003d0, 0x1f0000, // Floating Round to Integer Minus X-form (frim FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIMCC, 0xfc0007ff, 0xfc0003d1, 0x1f0000, // Floating Round to Integer Minus X-form (frim. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIN, 0xfc0007ff, 0xfc000310, 0x1f0000, // Floating Round to Integer Nearest X-form (frin FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRINCC, 0xfc0007ff, 0xfc000311, 0x1f0000, // Floating Round to Integer Nearest X-form (frin. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIP, 0xfc0007ff, 0xfc000390, 0x1f0000, // Floating Round to Integer Plus X-form (frip FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIPCC, 0xfc0007ff, 0xfc000391, 0x1f0000, // Floating Round to Integer Plus X-form (frip. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIZ, 0xfc0007ff, 0xfc000350, 0x1f0000, // Floating Round to Integer Toward Zero X-form (friz FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRIZCC, 0xfc0007ff, 0xfc000351, 0x1f0000, // Floating Round to Integer Toward Zero X-form (friz. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRSQRTES, 0xfc00003f, 0xec000034, 0x1f07c0, // Floating Reciprocal Square Root Estimate Single A-form (frsqrtes FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRSQRTESCC, 0xfc00003f, 0xec000035, 0x1f07c0, // Floating Reciprocal Square Root Estimate Single A-form (frsqrtes. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{HRFID, 0xfc0007fe, 0x4c000224, 0x3fff801, // Return From Interrupt Doubleword Hypervisor XL-form (hrfid)
-		[5]*argField{}},
+		[6]*argField{}},
 	{POPCNTB, 0xfc0007fe, 0x7c0000f4, 0xf801, // Population Count Bytes X-form (popcntb RA, RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{MFOCRF, 0xfc1007fe, 0x7c100026, 0x801, // Move From One Condition Register Field XFX-form (mfocrf RT,FXM)
-		[5]*argField{ap_Reg_6_10, ap_ImmUnsigned_12_19}},
+		[6]*argField{ap_Reg_6_10, ap_ImmUnsigned_12_19}},
 	{MTOCRF, 0xfc1007fe, 0x7c100120, 0x801, // Move To One Condition Register Field XFX-form (mtocrf FXM,RS)
-		[5]*argField{ap_ImmUnsigned_12_19, ap_Reg_6_10}},
+		[6]*argField{ap_ImmUnsigned_12_19, ap_Reg_6_10}},
 	{SLBMFEE, 0xfc0007fe, 0x7c000726, 0x1e0001, // SLB Move From Entry ESID X-form (slbmfee RT,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_16_20}},
 	{SLBMFEV, 0xfc0007fe, 0x7c0006a6, 0x1e0001, // SLB Move From Entry VSID X-form (slbmfev RT,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_16_20}},
 	{SLBMTE, 0xfc0007fe, 0x7c000324, 0x1f0001, // SLB Move To Entry X-form (slbmte RS,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_16_20}},
 	{RFSCV, 0xfc0007fe, 0x4c0000a4, 0x3fff801, // Return From System Call Vectored XL-form (rfscv)
-		[5]*argField{}},
+		[6]*argField{}},
 	{SCV, 0xfc000003, 0x44000001, 0x3fff01c, // System Call Vectored SC-form (scv LEV)
-		[5]*argField{ap_ImmUnsigned_20_26}},
+		[6]*argField{ap_ImmUnsigned_20_26}},
 	{LQ, 0xfc000000, 0xe0000000, 0xf, // Load Quadword DQ-form (lq RTp,DQ(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_27_shift4, ap_Reg_11_15}},
 	{STQ, 0xfc000003, 0xf8000002, 0x0, // Store Quadword DS-form (stq RSp,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{CNTLZD, 0xfc0007ff, 0x7c000074, 0xf800, // Count Leading Zeros Doubleword X-form (cntlzd RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CNTLZDCC, 0xfc0007ff, 0x7c000075, 0xf800, // Count Leading Zeros Doubleword X-form (cntlzd. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{DCBF, 0xfc0007fe, 0x7c0000ac, 0x3000001, // Data Cache Block Flush X-form (dcbf RA,RB,L)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_8_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_8_10}},
 	{DCBST, 0xfc0007fe, 0x7c00006c, 0x3e00001, // Data Cache Block Store X-form (dcbst RA,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20}},
 	{DCBT, 0xfc0007fe, 0x7c00022c, 0x1, // Data Cache Block Touch X-form (dcbt RA,RB,TH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_6_10}},
 	{DCBTST, 0xfc0007fe, 0x7c0001ec, 0x1, // Data Cache Block Touch for Store X-form (dcbtst RA,RB,TH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_6_10}},
 	{DIVD, 0xfc0007ff, 0x7c0003d2, 0x0, // Divide Doubleword XO-form (divd RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDCC, 0xfc0007ff, 0x7c0003d3, 0x0, // Divide Doubleword XO-form (divd. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDO, 0xfc0007ff, 0x7c0007d2, 0x0, // Divide Doubleword XO-form (divdo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDOCC, 0xfc0007ff, 0x7c0007d3, 0x0, // Divide Doubleword XO-form (divdo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDU, 0xfc0007ff, 0x7c000392, 0x0, // Divide Doubleword Unsigned XO-form (divdu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDUCC, 0xfc0007ff, 0x7c000393, 0x0, // Divide Doubleword Unsigned XO-form (divdu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDUO, 0xfc0007ff, 0x7c000792, 0x0, // Divide Doubleword Unsigned XO-form (divduo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVDUOCC, 0xfc0007ff, 0x7c000793, 0x0, // Divide Doubleword Unsigned XO-form (divduo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVW, 0xfc0007ff, 0x7c0003d6, 0x0, // Divide Word XO-form (divw RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWCC, 0xfc0007ff, 0x7c0003d7, 0x0, // Divide Word XO-form (divw. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWO, 0xfc0007ff, 0x7c0007d6, 0x0, // Divide Word XO-form (divwo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWOCC, 0xfc0007ff, 0x7c0007d7, 0x0, // Divide Word XO-form (divwo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWU, 0xfc0007ff, 0x7c000396, 0x0, // Divide Word Unsigned XO-form (divwu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWUCC, 0xfc0007ff, 0x7c000397, 0x0, // Divide Word Unsigned XO-form (divwu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWUO, 0xfc0007ff, 0x7c000796, 0x0, // Divide Word Unsigned XO-form (divwuo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{DIVWUOCC, 0xfc0007ff, 0x7c000797, 0x0, // Divide Word Unsigned XO-form (divwuo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{EIEIO, 0xfc0007fe, 0x7c0006ac, 0x3fff801, // Enforce In-order Execution of I/O X-form (eieio)
-		[5]*argField{}},
+		[6]*argField{}},
 	{EXTSB, 0xfc0007ff, 0x7c000774, 0xf800, // Extend Sign Byte X-form (extsb RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{EXTSBCC, 0xfc0007ff, 0x7c000775, 0xf800, // Extend Sign Byte X-form (extsb. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{EXTSW, 0xfc0007ff, 0x7c0007b4, 0xf800, // Extend Sign Word X-form (extsw RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{EXTSWCC, 0xfc0007ff, 0x7c0007b5, 0xf800, // Extend Sign Word X-form (extsw. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{FADDS, 0xfc00003f, 0xec00002a, 0x7c0, // Floating Add Single A-form (fadds FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FADDSCC, 0xfc00003f, 0xec00002b, 0x7c0, // Floating Add Single A-form (fadds. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FCFID, 0xfc0007ff, 0xfc00069c, 0x1f0000, // Floating Convert with round Signed Doubleword to Double-Precision format X-form (fcfid FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCFIDCC, 0xfc0007ff, 0xfc00069d, 0x1f0000, // Floating Convert with round Signed Doubleword to Double-Precision format X-form (fcfid. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTID, 0xfc0007ff, 0xfc00065c, 0x1f0000, // Floating Convert with round Double-Precision To Signed Doubleword format X-form (fctid FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDCC, 0xfc0007ff, 0xfc00065d, 0x1f0000, // Floating Convert with round Double-Precision To Signed Doubleword format X-form (fctid. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDZ, 0xfc0007ff, 0xfc00065e, 0x1f0000, // Floating Convert with truncate Double-Precision To Signed Doubleword format X-form (fctidz FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIDZCC, 0xfc0007ff, 0xfc00065f, 0x1f0000, // Floating Convert with truncate Double-Precision To Signed Doubleword format X-form (fctidz. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FDIVS, 0xfc00003f, 0xec000024, 0x7c0, // Floating Divide Single A-form (fdivs FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FDIVSCC, 0xfc00003f, 0xec000025, 0x7c0, // Floating Divide Single A-form (fdivs. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FMADDS, 0xfc00003f, 0xec00003a, 0x0, // Floating Multiply-Add Single A-form (fmadds FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMADDSCC, 0xfc00003f, 0xec00003b, 0x0, // Floating Multiply-Add Single A-form (fmadds. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMSUBS, 0xfc00003f, 0xec000038, 0x0, // Floating Multiply-Subtract Single A-form (fmsubs FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMSUBSCC, 0xfc00003f, 0xec000039, 0x0, // Floating Multiply-Subtract Single A-form (fmsubs. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMULS, 0xfc00003f, 0xec000032, 0xf800, // Floating Multiply Single A-form (fmuls FRT,FRA,FRC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
 	{FMULSCC, 0xfc00003f, 0xec000033, 0xf800, // Floating Multiply Single A-form (fmuls. FRT,FRA,FRC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
 	{FNMADDS, 0xfc00003f, 0xec00003e, 0x0, // Floating Negative Multiply-Add Single A-form (fnmadds FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMADDSCC, 0xfc00003f, 0xec00003f, 0x0, // Floating Negative Multiply-Add Single A-form (fnmadds. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMSUBS, 0xfc00003f, 0xec00003c, 0x0, // Floating Negative Multiply-Subtract Single A-form (fnmsubs FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMSUBSCC, 0xfc00003f, 0xec00003d, 0x0, // Floating Negative Multiply-Subtract Single A-form (fnmsubs. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FRES, 0xfc00003f, 0xec000030, 0x1f07c0, // Floating Reciprocal Estimate Single A-form (fres FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRESCC, 0xfc00003f, 0xec000031, 0x1f07c0, // Floating Reciprocal Estimate Single A-form (fres. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRSQRTE, 0xfc00003f, 0xfc000034, 0x1f07c0, // Floating Reciprocal Square Root Estimate A-form (frsqrte FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRSQRTECC, 0xfc00003f, 0xfc000035, 0x1f07c0, // Floating Reciprocal Square Root Estimate A-form (frsqrte. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSEL, 0xfc00003f, 0xfc00002e, 0x0, // Floating Select A-form (fsel FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FSELCC, 0xfc00003f, 0xfc00002f, 0x0, // Floating Select A-form (fsel. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FSQRTS, 0xfc00003f, 0xec00002c, 0x1f07c0, // Floating Square Root Single A-form (fsqrts FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSQRTSCC, 0xfc00003f, 0xec00002d, 0x1f07c0, // Floating Square Root Single A-form (fsqrts. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSUBS, 0xfc00003f, 0xec000028, 0x7c0, // Floating Subtract Single A-form (fsubs FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FSUBSCC, 0xfc00003f, 0xec000029, 0x7c0, // Floating Subtract Single A-form (fsubs. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{ICBI, 0xfc0007fe, 0x7c0007ac, 0x3e00001, // Instruction Cache Block Invalidate X-form (icbi RA,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20}},
 	{LD, 0xfc000003, 0xe8000000, 0x0, // Load Doubleword DS-form (ld RT,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LDARX, 0xfc0007fe, 0x7c0000a8, 0x0, // Load Doubleword And Reserve Indexed X-form (ldarx RT,RA,RB,EH)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
 	{LDU, 0xfc000003, 0xe8000001, 0x0, // Load Doubleword with Update DS-form (ldu RT,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LDUX, 0xfc0007fe, 0x7c00006a, 0x1, // Load Doubleword with Update Indexed X-form (ldux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LDX, 0xfc0007fe, 0x7c00002a, 0x1, // Load Doubleword Indexed X-form (ldx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWA, 0xfc000003, 0xe8000002, 0x0, // Load Word Algebraic DS-form (lwa RT,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{LWARX, 0xfc0007fe, 0x7c000028, 0x0, // Load Word & Reserve Indexed X-form (lwarx RT,RA,RB,EH)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20, ap_ImmUnsigned_31_31}},
 	{LWAUX, 0xfc0007fe, 0x7c0002ea, 0x1, // Load Word Algebraic with Update Indexed X-form (lwaux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWAX, 0xfc0007fe, 0x7c0002aa, 0x1, // Load Word Algebraic Indexed X-form (lwax RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MFTB, 0xfc0007fe, 0x7c0002e6, 0x1, // Move From Time Base XFX-form (mftb RT,TBR)
-		[5]*argField{ap_Reg_6_10, ap_SpReg_16_20_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_SpReg_16_20_11_15}},
 	{MTMSRD, 0xfc0007fe, 0x7c000164, 0x1ef801, // Move To MSR Doubleword X-form (mtmsrd RS,L)
-		[5]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
 	{MULHD, 0xfc0003ff, 0x7c000092, 0x400, // Multiply High Doubleword XO-form (mulhd RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHDCC, 0xfc0003ff, 0x7c000093, 0x400, // Multiply High Doubleword XO-form (mulhd. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHDU, 0xfc0003ff, 0x7c000012, 0x400, // Multiply High Doubleword Unsigned XO-form (mulhdu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHDUCC, 0xfc0003ff, 0x7c000013, 0x400, // Multiply High Doubleword Unsigned XO-form (mulhdu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHW, 0xfc0003ff, 0x7c000096, 0x400, // Multiply High Word XO-form (mulhw RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHWCC, 0xfc0003ff, 0x7c000097, 0x400, // Multiply High Word XO-form (mulhw. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHWU, 0xfc0003ff, 0x7c000016, 0x400, // Multiply High Word Unsigned XO-form (mulhwu RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULHWUCC, 0xfc0003ff, 0x7c000017, 0x400, // Multiply High Word Unsigned XO-form (mulhwu. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLD, 0xfc0007ff, 0x7c0001d2, 0x0, // Multiply Low Doubleword XO-form (mulld RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLDCC, 0xfc0007ff, 0x7c0001d3, 0x0, // Multiply Low Doubleword XO-form (mulld. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLDO, 0xfc0007ff, 0x7c0005d2, 0x0, // Multiply Low Doubleword XO-form (mulldo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLDOCC, 0xfc0007ff, 0x7c0005d3, 0x0, // Multiply Low Doubleword XO-form (mulldo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{RFID, 0xfc0007fe, 0x4c000024, 0x3fff801, // Return from Interrupt Doubleword XL-form (rfid)
-		[5]*argField{}},
+		[6]*argField{}},
 	{RLDCL, 0xfc00001f, 0x78000010, 0x0, // Rotate Left Doubleword then Clear Left MDS-form (rldcl RA,RS,RB,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDCLCC, 0xfc00001f, 0x78000011, 0x0, // Rotate Left Doubleword then Clear Left MDS-form (rldcl. RA,RS,RB,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDCR, 0xfc00001f, 0x78000012, 0x0, // Rotate Left Doubleword then Clear Right MDS-form (rldcr RA,RS,RB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDCRCC, 0xfc00001f, 0x78000013, 0x0, // Rotate Left Doubleword then Clear Right MDS-form (rldcr. RA,RS,RB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDIC, 0xfc00001d, 0x78000008, 0x0, // Rotate Left Doubleword Immediate then Clear MD-form (rldic RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDICCC, 0xfc00001d, 0x78000009, 0x0, // Rotate Left Doubleword Immediate then Clear MD-form (rldic. RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDICL, 0xfc00001d, 0x78000000, 0x0, // Rotate Left Doubleword Immediate then Clear Left MD-form (rldicl RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDICLCC, 0xfc00001d, 0x78000001, 0x0, // Rotate Left Doubleword Immediate then Clear Left MD-form (rldicl. RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDICR, 0xfc00001d, 0x78000004, 0x0, // Rotate Left Doubleword Immediate then Clear Right MD-form (rldicr RA,RS,SH,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDICRCC, 0xfc00001d, 0x78000005, 0x0, // Rotate Left Doubleword Immediate then Clear Right MD-form (rldicr. RA,RS,SH,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDIMI, 0xfc00001d, 0x7800000c, 0x0, // Rotate Left Doubleword Immediate then Mask Insert MD-form (rldimi RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{RLDIMICC, 0xfc00001d, 0x7800000d, 0x0, // Rotate Left Doubleword Immediate then Mask Insert MD-form (rldimi. RA,RS,SH,MB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20, ap_ImmUnsigned_26_26_21_25}},
 	{SC, 0xfc000002, 0x44000002, 0x3fff01d, // System Call SC-form (sc LEV)
-		[5]*argField{ap_ImmUnsigned_20_26}},
+		[6]*argField{ap_ImmUnsigned_20_26}},
 	{SLBIA, 0xfc0007fe, 0x7c0003e4, 0x31ff801, // SLB Invalidate All X-form (slbia IH)
-		[5]*argField{ap_ImmUnsigned_8_10}},
+		[6]*argField{ap_ImmUnsigned_8_10}},
 	{SLBIE, 0xfc0007fe, 0x7c000364, 0x3ff0001, // SLB Invalidate Entry X-form (slbie RB)
-		[5]*argField{ap_Reg_16_20}},
+		[6]*argField{ap_Reg_16_20}},
 	{SLD, 0xfc0007ff, 0x7c000036, 0x0, // Shift Left Doubleword X-form (sld RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SLDCC, 0xfc0007ff, 0x7c000037, 0x0, // Shift Left Doubleword X-form (sld. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRAD, 0xfc0007ff, 0x7c000634, 0x0, // Shift Right Algebraic Doubleword X-form (srad RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRADCC, 0xfc0007ff, 0x7c000635, 0x0, // Shift Right Algebraic Doubleword X-form (srad. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRADI, 0xfc0007fd, 0x7c000674, 0x0, // Shift Right Algebraic Doubleword Immediate XS-form (sradi RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
 	{SRADICC, 0xfc0007fd, 0x7c000675, 0x0, // Shift Right Algebraic Doubleword Immediate XS-form (sradi. RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_30_30_16_20}},
 	{SRD, 0xfc0007ff, 0x7c000436, 0x0, // Shift Right Doubleword X-form (srd RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRDCC, 0xfc0007ff, 0x7c000437, 0x0, // Shift Right Doubleword X-form (srd. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{STD, 0xfc000003, 0xf8000000, 0x0, // Store Doubleword DS-form (std RS,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{STDCXCC, 0xfc0007ff, 0x7c0001ad, 0x0, // Store Doubleword Conditional Indexed X-form (stdcx. RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STDU, 0xfc000003, 0xf8000001, 0x0, // Store Doubleword with Update DS-form (stdu RS,DS(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_29_shift2, ap_Reg_11_15}},
 	{STDUX, 0xfc0007fe, 0x7c00016a, 0x1, // Store Doubleword with Update Indexed X-form (stdux RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STDX, 0xfc0007fe, 0x7c00012a, 0x1, // Store Doubleword Indexed X-form (stdx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFIWX, 0xfc0007fe, 0x7c0007ae, 0x1, // Store Floating-Point as Integer Word Indexed X-form (stfiwx FRS,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STWCXCC, 0xfc0007ff, 0x7c00012d, 0x0, // Store Word Conditional Indexed X-form (stwcx. RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBF, 0xfc0007ff, 0x7c000050, 0x0, // Subtract From XO-form (subf RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFCC, 0xfc0007ff, 0x7c000051, 0x0, // Subtract From XO-form (subf. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFO, 0xfc0007ff, 0x7c000450, 0x0, // Subtract From XO-form (subfo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFOCC, 0xfc0007ff, 0x7c000451, 0x0, // Subtract From XO-form (subfo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{TD, 0xfc0007fe, 0x7c000088, 0x1, // Trap Doubleword X-form (td TO,RA,RB)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{TDI, 0xfc000000, 0x8000000, 0x0, // Trap Doubleword Immediate D-form (tdi TO,RA,SI)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{TLBSYNC, 0xfc0007fe, 0x7c00046c, 0x3fff801, // TLB Synchronize X-form (tlbsync)
-		[5]*argField{}},
+		[6]*argField{}},
 	{FCTIW, 0xfc0007ff, 0xfc00001c, 0x1f0000, // Floating Convert with round Double-Precision To Signed Word format X-form (fctiw FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWCC, 0xfc0007ff, 0xfc00001d, 0x1f0000, // Floating Convert with round Double-Precision To Signed Word format X-form (fctiw. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWZ, 0xfc0007ff, 0xfc00001e, 0x1f0000, // Floating Convert with truncate Double-Precision To Signed Word fomat X-form (fctiwz FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FCTIWZCC, 0xfc0007ff, 0xfc00001f, 0x1f0000, // Floating Convert with truncate Double-Precision To Signed Word fomat X-form (fctiwz. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSQRT, 0xfc00003f, 0xfc00002c, 0x1f07c0, // Floating Square Root A-form (fsqrt FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSQRTCC, 0xfc00003f, 0xfc00002d, 0x1f07c0, // Floating Square Root A-form (fsqrt. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{ADD, 0xfc0007ff, 0x7c000214, 0x0, // Add XO-form (add RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDCC, 0xfc0007ff, 0x7c000215, 0x0, // Add XO-form (add. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDO, 0xfc0007ff, 0x7c000614, 0x0, // Add XO-form (addo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDOCC, 0xfc0007ff, 0x7c000615, 0x0, // Add XO-form (addo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDC, 0xfc0007ff, 0x7c000014, 0x0, // Add Carrying XO-form (addc RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDCCC, 0xfc0007ff, 0x7c000015, 0x0, // Add Carrying XO-form (addc. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDCO, 0xfc0007ff, 0x7c000414, 0x0, // Add Carrying XO-form (addco RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDCOCC, 0xfc0007ff, 0x7c000415, 0x0, // Add Carrying XO-form (addco. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDE, 0xfc0007ff, 0x7c000114, 0x0, // Add Extended XO-form (adde RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDECC, 0xfc0007ff, 0x7c000115, 0x0, // Add Extended XO-form (adde. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDEO, 0xfc0007ff, 0x7c000514, 0x0, // Add Extended XO-form (addeo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{ADDEOCC, 0xfc0007ff, 0x7c000515, 0x0, // Add Extended XO-form (addeo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LI, 0xfc1f0000, 0x38000000, 0x0, // Add Immediate D-form (li RT,SI)
-		[5]*argField{ap_Reg_6_10, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_ImmSigned_16_31}},
 	{ADDI, 0xfc000000, 0x38000000, 0x0, // Add Immediate D-form (addi RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{ADDIC, 0xfc000000, 0x30000000, 0x0, // Add Immediate Carrying D-formy (addic RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{ADDICCC, 0xfc000000, 0x34000000, 0x0, // Add Immediate Carrying and Record D-form (addic. RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{LIS, 0xfc1f0000, 0x3c000000, 0x0, // Add Immediate Shifted D-form (lis RT,SI)
-		[5]*argField{ap_Reg_6_10, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_ImmSigned_16_31}},
 	{ADDIS, 0xfc000000, 0x3c000000, 0x0, // Add Immediate Shifted D-form (addis RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{ADDME, 0xfc0007ff, 0x7c0001d4, 0xf800, // Add to Minus One Extended XO-form (addme RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDMECC, 0xfc0007ff, 0x7c0001d5, 0xf800, // Add to Minus One Extended XO-form (addme. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDMEO, 0xfc0007ff, 0x7c0005d4, 0xf800, // Add to Minus One Extended XO-form (addmeo RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDMEOCC, 0xfc0007ff, 0x7c0005d5, 0xf800, // Add to Minus One Extended XO-form (addmeo. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDZE, 0xfc0007ff, 0x7c000194, 0xf800, // Add to Zero Extended XO-form (addze RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDZECC, 0xfc0007ff, 0x7c000195, 0xf800, // Add to Zero Extended XO-form (addze. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDZEO, 0xfc0007ff, 0x7c000594, 0xf800, // Add to Zero Extended XO-form (addzeo RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{ADDZEOCC, 0xfc0007ff, 0x7c000595, 0xf800, // Add to Zero Extended XO-form (addzeo. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{AND, 0xfc0007ff, 0x7c000038, 0x0, // AND X-form (and RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ANDCC, 0xfc0007ff, 0x7c000039, 0x0, // AND X-form (and. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ANDC, 0xfc0007ff, 0x7c000078, 0x0, // AND with Complement X-form (andc RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ANDCCC, 0xfc0007ff, 0x7c000079, 0x0, // AND with Complement X-form (andc. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ANDICC, 0xfc000000, 0x70000000, 0x0, // AND Immediate D-form (andi. RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 	{ANDISCC, 0xfc000000, 0x74000000, 0x0, // AND Immediate Shifted D-form (andis. RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 	{B, 0xfc000003, 0x48000000, 0x0, // Branch I-form (b target_addr)
-		[5]*argField{ap_PCRel_6_29_shift2}},
+		[6]*argField{ap_PCRel_6_29_shift2}},
 	{BA, 0xfc000003, 0x48000002, 0x0, // Branch I-form (ba target_addr)
-		[5]*argField{ap_Label_6_29_shift2}},
+		[6]*argField{ap_Label_6_29_shift2}},
 	{BL, 0xfc000003, 0x48000001, 0x0, // Branch I-form (bl target_addr)
-		[5]*argField{ap_PCRel_6_29_shift2}},
+		[6]*argField{ap_PCRel_6_29_shift2}},
 	{BLA, 0xfc000003, 0x48000003, 0x0, // Branch I-form (bla target_addr)
-		[5]*argField{ap_Label_6_29_shift2}},
+		[6]*argField{ap_Label_6_29_shift2}},
 	{BC, 0xfc000003, 0x40000000, 0x0, // Branch Conditional B-form (bc BO,BI,target_addr)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_PCRel_16_29_shift2}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_PCRel_16_29_shift2}},
 	{BCA, 0xfc000003, 0x40000002, 0x0, // Branch Conditional B-form (bca BO,BI,target_addr)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_Label_16_29_shift2}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_Label_16_29_shift2}},
 	{BCL, 0xfc000003, 0x40000001, 0x0, // Branch Conditional B-form (bcl BO,BI,target_addr)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_PCRel_16_29_shift2}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_PCRel_16_29_shift2}},
 	{BCLA, 0xfc000003, 0x40000003, 0x0, // Branch Conditional B-form (bcla BO,BI,target_addr)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_Label_16_29_shift2}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_Label_16_29_shift2}},
 	{BCCTR, 0xfc0007ff, 0x4c000420, 0xe000, // Branch Conditional to Count Register XL-form (bcctr BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{BCCTRL, 0xfc0007ff, 0x4c000421, 0xe000, // Branch Conditional to Count Register XL-form (bcctrl BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{BCLR, 0xfc0007ff, 0x4c000020, 0xe000, // Branch Conditional to Link Register XL-form (bclr BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{BCLRL, 0xfc0007ff, 0x4c000021, 0xe000, // Branch Conditional to Link Register XL-form (bclrl BO,BI,BH)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_CondRegBit_11_15, ap_ImmUnsigned_19_20}},
 	{CMPW, 0xfc2007fe, 0x7c000000, 0x400001, // Compare X-form (cmpw BF,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPD, 0xfc2007fe, 0x7c200000, 0x400001, // Compare X-form (cmpd BF,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMP, 0xfc0007fe, 0x7c000000, 0x400001, // Compare X-form (cmp BF,L,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPWI, 0xfc200000, 0x2c000000, 0x400000, // Compare Immediate D-form (cmpwi BF,RA,SI)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{CMPDI, 0xfc200000, 0x2c200000, 0x400000, // Compare Immediate D-form (cmpdi BF,RA,SI)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{CMPI, 0xfc000000, 0x2c000000, 0x400000, // Compare Immediate D-form (cmpi BF,L,RA,SI)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{CMPLW, 0xfc2007fe, 0x7c000040, 0x400001, // Compare Logical X-form (cmplw BF,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPLD, 0xfc2007fe, 0x7c200040, 0x400001, // Compare Logical X-form (cmpld BF,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPL, 0xfc0007fe, 0x7c000040, 0x400001, // Compare Logical X-form (cmpl BF,L,RA,RB)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{CMPLWI, 0xfc200000, 0x28000000, 0x400000, // Compare Logical Immediate D-form (cmplwi BF,RA,UI)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
 	{CMPLDI, 0xfc200000, 0x28200000, 0x400000, // Compare Logical Immediate D-form (cmpldi BF,RA,UI)
-		[5]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
 	{CMPLI, 0xfc000000, 0x28000000, 0x400000, // Compare Logical Immediate D-form (cmpli BF,L,RA,UI)
-		[5]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_CondRegField_6_8, ap_ImmUnsigned_10_10, ap_Reg_11_15, ap_ImmUnsigned_16_31}},
 	{CNTLZW, 0xfc0007ff, 0x7c000034, 0xf800, // Count Leading Zeros Word X-form (cntlzw RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CNTLZWCC, 0xfc0007ff, 0x7c000035, 0xf800, // Count Leading Zeros Word X-form (cntlzw. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{CRAND, 0xfc0007fe, 0x4c000202, 0x1, // Condition Register AND XL-form (crand BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CRANDC, 0xfc0007fe, 0x4c000102, 0x1, // Condition Register AND with Complement XL-form (crandc BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CREQV, 0xfc0007fe, 0x4c000242, 0x1, // Condition Register Equivalent XL-form (creqv BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CRNAND, 0xfc0007fe, 0x4c0001c2, 0x1, // Condition Register NAND XL-form (crnand BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CRNOR, 0xfc0007fe, 0x4c000042, 0x1, // Condition Register NOR XL-form (crnor BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CROR, 0xfc0007fe, 0x4c000382, 0x1, // Condition Register OR XL-form (cror BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CRORC, 0xfc0007fe, 0x4c000342, 0x1, // Condition Register OR with Complement XL-form (crorc BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{CRXOR, 0xfc0007fe, 0x4c000182, 0x1, // Condition Register XOR XL-form (crxor BT,BA,BB)
-		[5]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
+		[6]*argField{ap_CondRegBit_6_10, ap_CondRegBit_11_15, ap_CondRegBit_16_20}},
 	{DCBZ, 0xfc0007fe, 0x7c0007ec, 0x3e00001, // Data Cache Block set to Zero X-form (dcbz RA,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_16_20}},
 	{EQV, 0xfc0007ff, 0x7c000238, 0x0, // Equivalent X-form (eqv RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{EQVCC, 0xfc0007ff, 0x7c000239, 0x0, // Equivalent X-form (eqv. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{EXTSH, 0xfc0007ff, 0x7c000734, 0xf800, // Extend Sign Halfword X-form (extsh RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{EXTSHCC, 0xfc0007ff, 0x7c000735, 0xf800, // Extend Sign Halfword X-form (extsh. RA,RS)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10}},
 	{FABS, 0xfc0007ff, 0xfc000210, 0x1f0000, // Floating Absolute Value X-form (fabs FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FABSCC, 0xfc0007ff, 0xfc000211, 0x1f0000, // Floating Absolute Value X-form (fabs. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FADD, 0xfc00003f, 0xfc00002a, 0x7c0, // Floating Add A-form (fadd FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FADDCC, 0xfc00003f, 0xfc00002b, 0x7c0, // Floating Add A-form (fadd. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FCMPO, 0xfc0007fe, 0xfc000040, 0x600001, // Floating Compare Ordered X-form (fcmpo BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FCMPU, 0xfc0007fe, 0xfc000000, 0x600001, // Floating Compare Unordered X-form (fcmpu BF,FRA,FRB)
-		[5]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_CondRegField_6_8, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FDIV, 0xfc00003f, 0xfc000024, 0x7c0, // Floating Divide A-form (fdiv FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FDIVCC, 0xfc00003f, 0xfc000025, 0x7c0, // Floating Divide A-form (fdiv. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FMADD, 0xfc00003f, 0xfc00003a, 0x0, // Floating Multiply-Add A-form (fmadd FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMADDCC, 0xfc00003f, 0xfc00003b, 0x0, // Floating Multiply-Add A-form (fmadd. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMR, 0xfc0007ff, 0xfc000090, 0x1f0000, // Floating Move Register X-form (fmr FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FMRCC, 0xfc0007ff, 0xfc000091, 0x1f0000, // Floating Move Register X-form (fmr. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FMSUB, 0xfc00003f, 0xfc000038, 0x0, // Floating Multiply-Subtract A-form (fmsub FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMSUBCC, 0xfc00003f, 0xfc000039, 0x0, // Floating Multiply-Subtract A-form (fmsub. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FMUL, 0xfc00003f, 0xfc000032, 0xf800, // Floating Multiply A-form (fmul FRT,FRA,FRC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
 	{FMULCC, 0xfc00003f, 0xfc000033, 0xf800, // Floating Multiply A-form (fmul. FRT,FRA,FRC)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25}},
 	{FNABS, 0xfc0007ff, 0xfc000110, 0x1f0000, // Floating Negative Absolute Value X-form (fnabs FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FNABSCC, 0xfc0007ff, 0xfc000111, 0x1f0000, // Floating Negative Absolute Value X-form (fnabs. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FNEG, 0xfc0007ff, 0xfc000050, 0x1f0000, // Floating Negate X-form (fneg FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FNEGCC, 0xfc0007ff, 0xfc000051, 0x1f0000, // Floating Negate X-form (fneg. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FNMADD, 0xfc00003f, 0xfc00003e, 0x0, // Floating Negative Multiply-Add A-form (fnmadd FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMADDCC, 0xfc00003f, 0xfc00003f, 0x0, // Floating Negative Multiply-Add A-form (fnmadd. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMSUB, 0xfc00003f, 0xfc00003c, 0x0, // Floating Negative Multiply-Subtract A-form (fnmsub FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FNMSUBCC, 0xfc00003f, 0xfc00003d, 0x0, // Floating Negative Multiply-Subtract A-form (fnmsub. FRT,FRA,FRC,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_21_25, ap_FPReg_16_20}},
 	{FRSP, 0xfc0007ff, 0xfc000018, 0x1f0000, // Floating Round to Single-Precision X-form (frsp FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FRSPCC, 0xfc0007ff, 0xfc000019, 0x1f0000, // Floating Round to Single-Precision X-form (frsp. FRT,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_16_20}},
 	{FSUB, 0xfc00003f, 0xfc000028, 0x7c0, // Floating Subtract A-form (fsub FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{FSUBCC, 0xfc00003f, 0xfc000029, 0x7c0, // Floating Subtract A-form (fsub. FRT,FRA,FRB)
-		[5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_FPReg_16_20}},
 	{ISYNC, 0xfc0007fe, 0x4c00012c, 0x3fff801, // Instruction Synchronize XL-form (isync)
-		[5]*argField{}},
+		[6]*argField{}},
 	{LBZ, 0xfc000000, 0x88000000, 0x0, // Load Byte and Zero D-form (lbz RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LBZU, 0xfc000000, 0x8c000000, 0x0, // Load Byte and Zero with Update D-form (lbzu RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LBZUX, 0xfc0007fe, 0x7c0000ee, 0x1, // Load Byte and Zero with Update Indexed X-form (lbzux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LBZX, 0xfc0007fe, 0x7c0000ae, 0x1, // Load Byte and Zero Indexed X-form (lbzx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFD, 0xfc000000, 0xc8000000, 0x0, // Load Floating-Point Double D-form (lfd FRT,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LFDU, 0xfc000000, 0xcc000000, 0x0, // Load Floating-Point Double with Update D-form (lfdu FRT,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LFDUX, 0xfc0007fe, 0x7c0004ee, 0x1, // Load Floating-Point Double with Update Indexed X-form (lfdux FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFDX, 0xfc0007fe, 0x7c0004ae, 0x1, // Load Floating-Point Double Indexed X-form (lfdx FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFS, 0xfc000000, 0xc0000000, 0x0, // Load Floating-Point Single D-form (lfs FRT,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LFSU, 0xfc000000, 0xc4000000, 0x0, // Load Floating-Point Single with Update D-form (lfsu FRT,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LFSUX, 0xfc0007fe, 0x7c00046e, 0x1, // Load Floating-Point Single with Update Indexed X-form (lfsux FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LFSX, 0xfc0007fe, 0x7c00042e, 0x1, // Load Floating-Point Single Indexed X-form (lfsx FRT,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHA, 0xfc000000, 0xa8000000, 0x0, // Load Halfword Algebraic D-form (lha RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LHAU, 0xfc000000, 0xac000000, 0x0, // Load Halfword Algebraic with Update D-form (lhau RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LHAUX, 0xfc0007fe, 0x7c0002ee, 0x1, // Load Halfword Algebraic with Update Indexed X-form (lhaux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHAX, 0xfc0007fe, 0x7c0002ae, 0x1, // Load Halfword Algebraic Indexed X-form (lhax RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHBRX, 0xfc0007fe, 0x7c00062c, 0x1, // Load Halfword Byte-Reverse Indexed X-form (lhbrx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHZ, 0xfc000000, 0xa0000000, 0x0, // Load Halfword and Zero D-form (lhz RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LHZU, 0xfc000000, 0xa4000000, 0x0, // Load Halfword and Zero with Update D-form (lhzu RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LHZUX, 0xfc0007fe, 0x7c00026e, 0x1, // Load Halfword and Zero with Update Indexed X-form (lhzux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LHZX, 0xfc0007fe, 0x7c00022e, 0x1, // Load Halfword and Zero Indexed X-form (lhzx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LMW, 0xfc000000, 0xb8000000, 0x0, // Load Multiple Word D-form (lmw RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LSWI, 0xfc0007fe, 0x7c0004aa, 0x1, // Load String Word Immediate X-form (lswi RT,RA,NB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{LSWX, 0xfc0007fe, 0x7c00042a, 0x1, // Load String Word Indexed X-form (lswx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWBRX, 0xfc0007fe, 0x7c00042c, 0x1, // Load Word Byte-Reverse Indexed X-form (lwbrx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWZ, 0xfc000000, 0x80000000, 0x0, // Load Word and Zero D-form (lwz RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LWZU, 0xfc000000, 0x84000000, 0x0, // Load Word and Zero with Update D-form (lwzu RT,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{LWZUX, 0xfc0007fe, 0x7c00006e, 0x1, // Load Word and Zero with Update Indexed X-form (lwzux RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{LWZX, 0xfc0007fe, 0x7c00002e, 0x1, // Load Word and Zero Indexed X-form (lwzx RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MCRF, 0xfc0007fe, 0x4c000000, 0x63f801, // Move Condition Register Field XL-form (mcrf BF,BFA)
-		[5]*argField{ap_CondRegField_6_8, ap_CondRegField_11_13}},
+		[6]*argField{ap_CondRegField_6_8, ap_CondRegField_11_13}},
 	{MCRFS, 0xfc0007fe, 0xfc000080, 0x63f801, // Move to Condition Register from FPSCR X-form (mcrfs BF,BFA)
-		[5]*argField{ap_CondRegField_6_8, ap_CondRegField_11_13}},
+		[6]*argField{ap_CondRegField_6_8, ap_CondRegField_11_13}},
 	{MFCR, 0xfc1007fe, 0x7c000026, 0xff801, // Move From Condition Register XFX-form (mfcr RT)
-		[5]*argField{ap_Reg_6_10}},
+		[6]*argField{ap_Reg_6_10}},
 	{MFFS, 0xfc1f07ff, 0xfc00048e, 0xf800, // Move From FPSCR X-form (mffs FRT)
-		[5]*argField{ap_FPReg_6_10}},
+		[6]*argField{ap_FPReg_6_10}},
 	{MFFSCC, 0xfc1f07ff, 0xfc00048f, 0xf800, // Move From FPSCR X-form (mffs. FRT)
-		[5]*argField{ap_FPReg_6_10}},
+		[6]*argField{ap_FPReg_6_10}},
 	{MFMSR, 0xfc0007fe, 0x7c0000a6, 0x1ff801, // Move From MSR X-form (mfmsr RT)
-		[5]*argField{ap_Reg_6_10}},
+		[6]*argField{ap_Reg_6_10}},
 	{MFSPR, 0xfc0007fe, 0x7c0002a6, 0x1, // Move From Special Purpose Register XFX-form (mfspr RT,SPR)
-		[5]*argField{ap_Reg_6_10, ap_SpReg_16_20_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_SpReg_16_20_11_15}},
 	{MTCRF, 0xfc1007fe, 0x7c000120, 0x801, // Move To Condition Register Fields XFX-form (mtcrf FXM,RS)
-		[5]*argField{ap_ImmUnsigned_12_19, ap_Reg_6_10}},
+		[6]*argField{ap_ImmUnsigned_12_19, ap_Reg_6_10}},
 	{MTFSB0, 0xfc0007ff, 0xfc00008c, 0x1ff800, // Move To FPSCR Bit 0 X-form (mtfsb0 BT)
-		[5]*argField{ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_ImmUnsigned_6_10}},
 	{MTFSB0CC, 0xfc0007ff, 0xfc00008d, 0x1ff800, // Move To FPSCR Bit 0 X-form (mtfsb0. BT)
-		[5]*argField{ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_ImmUnsigned_6_10}},
 	{MTFSB1, 0xfc0007ff, 0xfc00004c, 0x1ff800, // Move To FPSCR Bit 1 X-form (mtfsb1 BT)
-		[5]*argField{ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_ImmUnsigned_6_10}},
 	{MTFSB1CC, 0xfc0007ff, 0xfc00004d, 0x1ff800, // Move To FPSCR Bit 1 X-form (mtfsb1. BT)
-		[5]*argField{ap_ImmUnsigned_6_10}},
+		[6]*argField{ap_ImmUnsigned_6_10}},
 	{MTFSF, 0xfc0007ff, 0xfc00058e, 0x0, // Move To FPSCR Fields XFL-form (mtfsf FLM,FRB,L,W)
-		[5]*argField{ap_ImmUnsigned_7_14, ap_FPReg_16_20, ap_ImmUnsigned_6_6, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_ImmUnsigned_7_14, ap_FPReg_16_20, ap_ImmUnsigned_6_6, ap_ImmUnsigned_15_15}},
 	{MTFSFCC, 0xfc0007ff, 0xfc00058f, 0x0, // Move To FPSCR Fields XFL-form (mtfsf. FLM,FRB,L,W)
-		[5]*argField{ap_ImmUnsigned_7_14, ap_FPReg_16_20, ap_ImmUnsigned_6_6, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_ImmUnsigned_7_14, ap_FPReg_16_20, ap_ImmUnsigned_6_6, ap_ImmUnsigned_15_15}},
 	{MTFSFI, 0xfc0007ff, 0xfc00010c, 0x7e0800, // Move To FPSCR Field Immediate X-form (mtfsfi BF,U,W)
-		[5]*argField{ap_ImmUnsigned_6_8, ap_ImmUnsigned_16_19, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_ImmUnsigned_6_8, ap_ImmUnsigned_16_19, ap_ImmUnsigned_15_15}},
 	{MTFSFICC, 0xfc0007ff, 0xfc00010d, 0x7e0800, // Move To FPSCR Field Immediate X-form (mtfsfi. BF,U,W)
-		[5]*argField{ap_ImmUnsigned_6_8, ap_ImmUnsigned_16_19, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_ImmUnsigned_6_8, ap_ImmUnsigned_16_19, ap_ImmUnsigned_15_15}},
 	{MTMSR, 0xfc0007fe, 0x7c000124, 0x1ef801, // Move To MSR X-form (mtmsr RS,L)
-		[5]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_6_10, ap_ImmUnsigned_15_15}},
 	{MTSPR, 0xfc0007fe, 0x7c0003a6, 0x1, // Move To Special Purpose Register XFX-form (mtspr SPR,RS)
-		[5]*argField{ap_SpReg_16_20_11_15, ap_Reg_6_10}},
+		[6]*argField{ap_SpReg_16_20_11_15, ap_Reg_6_10}},
 	{MULLI, 0xfc000000, 0x1c000000, 0x0, // Multiply Low Immediate D-form (mulli RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{MULLW, 0xfc0007ff, 0x7c0001d6, 0x0, // Multiply Low Word XO-form (mullw RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLWCC, 0xfc0007ff, 0x7c0001d7, 0x0, // Multiply Low Word XO-form (mullw. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLWO, 0xfc0007ff, 0x7c0005d6, 0x0, // Multiply Low Word XO-form (mullwo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{MULLWOCC, 0xfc0007ff, 0x7c0005d7, 0x0, // Multiply Low Word XO-form (mullwo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{NAND, 0xfc0007ff, 0x7c0003b8, 0x0, // NAND X-form (nand RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{NANDCC, 0xfc0007ff, 0x7c0003b9, 0x0, // NAND X-form (nand. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{NEG, 0xfc0007ff, 0x7c0000d0, 0xf800, // Negate XO-form (neg RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{NEGCC, 0xfc0007ff, 0x7c0000d1, 0xf800, // Negate XO-form (neg. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{NEGO, 0xfc0007ff, 0x7c0004d0, 0xf800, // Negate XO-form (nego RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{NEGOCC, 0xfc0007ff, 0x7c0004d1, 0xf800, // Negate XO-form (nego. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{NOR, 0xfc0007ff, 0x7c0000f8, 0x0, // NOR X-form (nor RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{NORCC, 0xfc0007ff, 0x7c0000f9, 0x0, // NOR X-form (nor. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{OR, 0xfc0007ff, 0x7c000378, 0x0, // OR X-form (or RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ORCC, 0xfc0007ff, 0x7c000379, 0x0, // OR X-form (or. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ORC, 0xfc0007ff, 0x7c000338, 0x0, // OR with Complement X-form (orc RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{ORCCC, 0xfc0007ff, 0x7c000339, 0x0, // OR with Complement X-form (orc. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+	{NOP, 0xffffffff, 0x60000000, 0x0, // OR Immediate D-form (nop)
+		[6]*argField{}},
 	{ORI, 0xfc000000, 0x60000000, 0x0, // OR Immediate D-form (ori RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 	{ORIS, 0xfc000000, 0x64000000, 0x0, // OR Immediate Shifted D-form (oris RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 	{RLWIMI, 0xfc000001, 0x50000000, 0x0, // Rotate Left Word Immediate then Mask Insert M-form (rlwimi RA,RS,SH,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{RLWIMICC, 0xfc000001, 0x50000001, 0x0, // Rotate Left Word Immediate then Mask Insert M-form (rlwimi. RA,RS,SH,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{RLWINM, 0xfc000001, 0x54000000, 0x0, // Rotate Left Word Immediate then AND with Mask M-form (rlwinm RA,RS,SH,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{RLWINMCC, 0xfc000001, 0x54000001, 0x0, // Rotate Left Word Immediate then AND with Mask M-form (rlwinm. RA,RS,SH,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{RLWNM, 0xfc000001, 0x5c000000, 0x0, // Rotate Left Word then AND with Mask M-form (rlwnm RA,RS,RB,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{RLWNMCC, 0xfc000001, 0x5c000001, 0x0, // Rotate Left Word then AND with Mask M-form (rlwnm. RA,RS,RB,MB,ME)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20, ap_ImmUnsigned_21_25, ap_ImmUnsigned_26_30}},
 	{SLW, 0xfc0007ff, 0x7c000030, 0x0, // Shift Left Word X-form (slw RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SLWCC, 0xfc0007ff, 0x7c000031, 0x0, // Shift Left Word X-form (slw. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRAW, 0xfc0007ff, 0x7c000630, 0x0, // Shift Right Algebraic Word X-form (sraw RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRAWCC, 0xfc0007ff, 0x7c000631, 0x0, // Shift Right Algebraic Word X-form (sraw. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRAWI, 0xfc0007ff, 0x7c000670, 0x0, // Shift Right Algebraic Word Immediate X-form (srawi RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20}},
 	{SRAWICC, 0xfc0007ff, 0x7c000671, 0x0, // Shift Right Algebraic Word Immediate X-form (srawi. RA,RS,SH)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_20}},
 	{SRW, 0xfc0007ff, 0x7c000430, 0x0, // Shift Right Word X-form (srw RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{SRWCC, 0xfc0007ff, 0x7c000431, 0x0, // Shift Right Word X-form (srw. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{STB, 0xfc000000, 0x98000000, 0x0, // Store Byte D-form (stb RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STBU, 0xfc000000, 0x9c000000, 0x0, // Store Byte with Update D-form (stbu RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STBUX, 0xfc0007fe, 0x7c0001ee, 0x1, // Store Byte with Update Indexed X-form (stbux RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STBX, 0xfc0007fe, 0x7c0001ae, 0x1, // Store Byte Indexed X-form (stbx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFD, 0xfc000000, 0xd8000000, 0x0, // Store Floating-Point Double D-form (stfd FRS,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STFDU, 0xfc000000, 0xdc000000, 0x0, // Store Floating-Point Double with Update D-form (stfdu FRS,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STFDUX, 0xfc0007fe, 0x7c0005ee, 0x1, // Store Floating-Point Double with Update Indexed X-form (stfdux FRS,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFDX, 0xfc0007fe, 0x7c0005ae, 0x1, // Store Floating-Point Double Indexed X-form (stfdx FRS,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFS, 0xfc000000, 0xd0000000, 0x0, // Store Floating-Point Single D-form (stfs FRS,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STFSU, 0xfc000000, 0xd4000000, 0x0, // Store Floating-Point Single with Update D-form (stfsu FRS,D(RA))
-		[5]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_FPReg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STFSUX, 0xfc0007fe, 0x7c00056e, 0x1, // Store Floating-Point Single with Update Indexed X-form (stfsux FRS,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STFSX, 0xfc0007fe, 0x7c00052e, 0x1, // Store Floating-Point Single Indexed X-form (stfsx FRS,RA,RB)
-		[5]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_FPReg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STH, 0xfc000000, 0xb0000000, 0x0, // Store Halfword D-form (sth RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STHBRX, 0xfc0007fe, 0x7c00072c, 0x1, // Store Halfword Byte-Reverse Indexed X-form (sthbrx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STHU, 0xfc000000, 0xb4000000, 0x0, // Store Halfword with Update D-form (sthu RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STHUX, 0xfc0007fe, 0x7c00036e, 0x1, // Store Halfword with Update Indexed X-form (sthux RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STHX, 0xfc0007fe, 0x7c00032e, 0x1, // Store Halfword Indexed X-form (sthx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STMW, 0xfc000000, 0xbc000000, 0x0, // Store Multiple Word D-form (stmw RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STSWI, 0xfc0007fe, 0x7c0005aa, 0x1, // Store String Word Immediate X-form (stswi RS,RA,NB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmUnsigned_16_20}},
 	{STSWX, 0xfc0007fe, 0x7c00052a, 0x1, // Store String Word Indexed X-form (stswx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STW, 0xfc000000, 0x90000000, 0x0, // Store Word D-form (stw RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STWBRX, 0xfc0007fe, 0x7c00052c, 0x1, // Store Word Byte-Reverse Indexed X-form (stwbrx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STWU, 0xfc000000, 0x94000000, 0x0, // Store Word with Update D-form (stwu RS,D(RA))
-		[5]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Offset_16_31, ap_Reg_11_15}},
 	{STWUX, 0xfc0007fe, 0x7c00016e, 0x1, // Store Word with Update Indexed X-form (stwux RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{STWX, 0xfc0007fe, 0x7c00012e, 0x1, // Store Word Indexed X-form (stwx RS,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFC, 0xfc0007ff, 0x7c000010, 0x0, // Subtract From Carrying XO-form (subfc RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFCCC, 0xfc0007ff, 0x7c000011, 0x0, // Subtract From Carrying XO-form (subfc. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFCO, 0xfc0007ff, 0x7c000410, 0x0, // Subtract From Carrying XO-form (subfco RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFCOCC, 0xfc0007ff, 0x7c000411, 0x0, // Subtract From Carrying XO-form (subfco. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFE, 0xfc0007ff, 0x7c000110, 0x0, // Subtract From Extended XO-form (subfe RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFECC, 0xfc0007ff, 0x7c000111, 0x0, // Subtract From Extended XO-form (subfe. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFEO, 0xfc0007ff, 0x7c000510, 0x0, // Subtract From Extended XO-form (subfeo RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFEOCC, 0xfc0007ff, 0x7c000511, 0x0, // Subtract From Extended XO-form (subfeo. RT,RA,RB)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{SUBFIC, 0xfc000000, 0x20000000, 0x0, // Subtract From Immediate Carrying D-form (subfic RT,RA,SI)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{SUBFME, 0xfc0007ff, 0x7c0001d0, 0xf800, // Subtract From Minus One Extended XO-form (subfme RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFMECC, 0xfc0007ff, 0x7c0001d1, 0xf800, // Subtract From Minus One Extended XO-form (subfme. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFMEO, 0xfc0007ff, 0x7c0005d0, 0xf800, // Subtract From Minus One Extended XO-form (subfmeo RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFMEOCC, 0xfc0007ff, 0x7c0005d1, 0xf800, // Subtract From Minus One Extended XO-form (subfmeo. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFZE, 0xfc0007ff, 0x7c000190, 0xf800, // Subtract From Zero Extended XO-form (subfze RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFZECC, 0xfc0007ff, 0x7c000191, 0xf800, // Subtract From Zero Extended XO-form (subfze. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFZEO, 0xfc0007ff, 0x7c000590, 0xf800, // Subtract From Zero Extended XO-form (subfzeo RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SUBFZEOCC, 0xfc0007ff, 0x7c000591, 0xf800, // Subtract From Zero Extended XO-form (subfzeo. RT,RA)
-		[5]*argField{ap_Reg_6_10, ap_Reg_11_15}},
+		[6]*argField{ap_Reg_6_10, ap_Reg_11_15}},
 	{SYNC, 0xfc0007fe, 0x7c0004ac, 0x31cf801, // Synchronize X-form (sync L,SC)
-		[5]*argField{ap_ImmUnsigned_8_10, ap_ImmUnsigned_14_15}},
+		[6]*argField{ap_ImmUnsigned_8_10, ap_ImmUnsigned_14_15}},
 	{TLBIE, 0xfc0007fe, 0x7c000264, 0x100001, // TLB Invalidate Entry X-form (tlbie RB,RS,RIC,PRS,R)
-		[5]*argField{ap_Reg_16_20, ap_Reg_6_10, ap_ImmUnsigned_12_13, ap_ImmUnsigned_14_14, ap_ImmUnsigned_15_15}},
+		[6]*argField{ap_Reg_16_20, ap_Reg_6_10, ap_ImmUnsigned_12_13, ap_ImmUnsigned_14_14, ap_ImmUnsigned_15_15}},
 	{TW, 0xfc0007fe, 0x7c000008, 0x1, // Trap Word X-form (tw TO,RA,RB)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_Reg_16_20}},
 	{TWI, 0xfc000000, 0xc000000, 0x0, // Trap Word Immediate D-form (twi TO,RA,SI)
-		[5]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
+		[6]*argField{ap_ImmUnsigned_6_10, ap_Reg_11_15, ap_ImmSigned_16_31}},
 	{XOR, 0xfc0007ff, 0x7c000278, 0x0, // XOR X-form (xor RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{XORCC, 0xfc0007ff, 0x7c000279, 0x0, // XOR X-form (xor. RA,RS,RB)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_Reg_16_20}},
 	{XORI, 0xfc000000, 0x68000000, 0x0, // XOR Immediate D-form (xori RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 	{XORIS, 0xfc000000, 0x6c000000, 0x0, // XOR Immediate Shifted D-form (xoris RA,RS,UI)
-		[5]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
+		[6]*argField{ap_Reg_11_15, ap_Reg_6_10, ap_ImmUnsigned_16_31}},
 }
diff --git a/ppc64/ppc64map/map.go b/ppc64/ppc64map/map.go
index 9346061..925578d 100644
--- a/ppc64/ppc64map/map.go
+++ b/ppc64/ppc64map/map.go
@@ -97,10 +97,6 @@
 
 	p := &Prog{}
 	for _, row := range table {
-		// TODO: add support for prefixed instructions. Ignore for now.
-		if row[2][0] == ',' {
-			continue
-		}
 		add(p, row[0], row[1], row[2], row[3])
 	}
 	return p, nil
@@ -123,13 +119,16 @@
 }
 
 type Inst struct {
-	Text     string
-	Encoding string
-	Op       string
-	Mask     uint32
-	Value    uint32
-	DontCare uint32
-	Fields   []Field
+	Text      string
+	Encoding  string
+	Op        string
+	Mask      uint32
+	Value     uint32
+	DontCare  uint32
+	SMask     uint32 // The opcode Mask of the suffix word
+	SValue    uint32 // Likewise for the Value
+	SDontCare uint32 // Likewise for the DontCare bits
+	Fields    []Field
 }
 
 func (i Inst) String() string {
@@ -140,6 +139,9 @@
 	Name string
 	Bits int8
 	Offs int8
+	// Instruction word position.  0 for single word instructions (all < ISA 3.1 insn)
+	// For prefixed instructions, 0 for the prefix word, 1 for the second insn word.
+	Word int8
 }
 
 func (a Arg) String() string {
@@ -209,28 +211,26 @@
 	return bits.OnesCount32(i[j].Mask) > bits.OnesCount32(i[k].Mask)
 }
 
-// add adds the entry from the CSV described by text, mnemonics, encoding, and tags
-// to the program p.
-func add(p *Prog, text, mnemonics, encoding, tags string) {
-	// Parse encoding, building size and offset of each field.
-	// The first field in the encoding is the smallest offset.
-	// And note the MSB is bit 0, not bit 31.
-	// Example: "31@0|RS@6|RA@11|///@16|26@21|Rc@31|"
-	var args Args
+// Split the string encoding into an Args. The encoding string loosely matches the regex
+// (arg@bitpos|)+
+func parseFields(encoding, text string, word int8) Args {
 	var err error
+	var args Args
+
 	fields := strings.Split(encoding, "|")
+
 	for i, f := range fields {
 		name, off := "", -1
 		if f == "" {
 			off = 32
 			if i == 0 || i != len(fields)-1 {
 				fmt.Fprintf(os.Stderr, "%s: wrong %d-th encoding field: %q\n", text, i, f)
-				return
+				panic("Invalid encoding entry.")
 			}
 		} else {
 			j := strings.Index(f, "@")
 			if j < 0 {
 				fmt.Fprintf(os.Stderr, "%s: wrong %d-th encoding field: %q\n", text, i, f)
+				panic("Invalid encoding entry.")
-				continue
 			}
 			k := strings.Index(f[j+1:], " ")
@@ -249,12 +250,18 @@
 			args[len(args)-1].Bits += int8(off)
 		}
 		if name != "" {
-			arg := Arg{Name: name, Offs: int8(off), Bits: int8(-off)}
+			arg := Arg{Name: name, Offs: int8(off), Bits: int8(-off), Word: word}
 			args.Append(arg)
 		}
 	}
 
-	var mask, value, dontCare uint32
+	return args
+}
+
+// Compute the Mask (usually Opcode + secondary Opcode bitfields),
+// the Value (the expected value under the mask), and
+// reserved bits (i.e. the // fields which should be set to 0)
+func computeMaskValueReserved(args Args, text string) (mask, value, reserved uint32) {
 	for i := 0; i < len(args); i++ {
 		arg := args[i]
 		v, err := strconv.Atoi(arg.Name)
@@ -271,7 +278,7 @@
 			if arg.Name != strings.Repeat("/", len(arg.Name)) {
 				log.Fatalf("%s: arg %v named like a don't care bit, but it's not", text, arg)
 			}
-			dontCare |= arg.BitMask()
+			reserved |= arg.BitMask()
 			args.Delete(i)
 			i--
 		default:
@@ -291,12 +298,13 @@
 	}
 
 	// sanity checks
-	if mask&dontCare != 0 {
-		log.Fatalf("%s: mask (%08x) and don't care (%08x) collide", text, mask, dontCare)
+	if mask&reserved != 0 {
+		log.Fatalf("%s: mask (%08x) and don't care (%08x) collide", text, mask, reserved)
 	}
 	if value&^mask != 0 {
 		log.Fatalf("%s: value (%08x) out of range of mask (%08x)", text, value, mask)
 	}
+
 	var argMask uint32
 	for _, arg := range args {
 		if arg.Bits <= 0 || arg.Bits > 32 || arg.Offs > 31 || arg.Offs <= 0 {
@@ -310,16 +318,57 @@
 		}
 		argMask |= arg.BitMask()
 	}
-	if 1<<32-1 != mask|dontCare|argMask {
+	if 1<<32-1 != mask|reserved|argMask {
 		log.Fatalf("%s: args %v fail to cover all 32 bits", text, args)
 	}
 
+	return
+}
+
+// Parse a row from the CSV describing the instructions, and place the
+// detected instructions into p. One entry may generate multiple instruction
+// entries as each extended mnemonic listed in text is treated like a unique
+// instruction.
+func add(p *Prog, text, mnemonics, encoding, tags string) {
+	// Parse encoding, building size and offset of each field.
+	// The first field in the encoding is the smallest offset.
+	// And note the MSB is bit 0, not bit 31.
+	// Example: "31@0|RS@6|RA@11|///@16|26@21|Rc@31|"
+	var args, pargs Args
+	var pmask, pvalue, presv uint32
+	iword := int8(0)
+	ispfx := false
+
+	// Is this a prefixed instruction?
+	if encoding[0] == ',' {
+		pfields := strings.Split(encoding, ",")[1:]
+
+		if len(pfields) != 2 {
+			fmt.Fprintf(os.Stderr, "%s: Prefixed instruction must be 2 words long.\n", text)
+			return
+		}
+		pargs = parseFields(pfields[0], text, iword)
+		pmask, pvalue, presv = computeMaskValueReserved(pargs, text)
+		// Move to next instruction word
+		iword++
+		encoding = pfields[1]
+		ispfx = true
+	}
+
+	args = parseFields(encoding, text, iword)
+	mask, value, dontCare := computeMaskValueReserved(args, text)
+
+	if ispfx {
+		args = append(args, pargs...)
+	}
+
 	// split mnemonics into individual instructions
 	// example: "b target_addr (AA=0 LK=0)|ba target_addr (AA=1 LK=0)|bl target_addr (AA=0 LK=1)|bla target_addr (AA=1 LK=1)"
 	insts := strings.Split(categoryRe.ReplaceAllString(mnemonics, ""), "|")
 	foundInst := []Inst{}
 	for _, inst := range insts {
 		value, mask := value, mask
+		pvalue, pmask := pvalue, pmask
 		args := args.Clone()
 		if inst == "" {
 			continue
@@ -345,6 +394,9 @@
 			args.Delete(i)
 		}
 		inst := Inst{Text: text, Encoding: parts[1], Value: value, Mask: mask, DontCare: dontCare}
+		if ispfx {
+			inst = Inst{Text: text, Encoding: parts[1], Value: pvalue, Mask: pmask, DontCare: presv, SValue: value, SMask: mask, SDontCare: dontCare}
+		}
 
 		// order inst.Args according to mnemonics order
 		for i, opr := range operandRe.FindAllString(parts[1], -1) {
@@ -370,9 +422,28 @@
 				} else {
 					opr = "BD"
 				}
-			case "UI", "BO", "BH", "TH", "LEV", "NB", "L", "TO", "FXM", "FC", "U", "W", "FLM", "UIM", "IMM8", "RIC", "PRS", "SHB", "SHW", "ST", "SIX", "PS", "DCM", "DGM", "RMC", "R", "SP", "S", "DM", "CT", "EH", "E", "MO", "WC", "A", "IH", "OC", "DUI", "DUIS", "CY", "SC", "PL", "MP", "N", "IMM", "DRM", "RM":
+
+			case "XMSK", "YMSK", "PMSK", "IX":
+				typ = asm.TypeImmUnsigned
+
+			case "IMM32":
+				typ = asm.TypeImmUnsigned
+				opr = "imm0"
+				opr2 = "imm1"
+
+			// Handle these cases specially. Note IMM is used on
+			// prefixed MMA instructions as a bitmask. Usually, it is a signed value.
+			case "R", "UIM", "IMM":
+				if ispfx {
+					typ = asm.TypeImmUnsigned
+					break
+				}
+				fallthrough
+
+			case "UI", "BO", "BH", "TH", "LEV", "NB", "L", "TO", "FXM", "FC", "U", "W", "FLM", "IMM8", "RIC", "PRS", "SHB", "SHW", "ST", "SIX", "PS", "DCM", "DGM", "RMC", "SP", "S", "DM", "CT", "EH", "E", "MO", "WC", "A", "IH", "OC", "DUI", "DUIS", "CY", "SC", "PL", "MP", "N", "DRM", "RM":
 				typ = asm.TypeImmUnsigned
 				if i := args.Find(opr); i < 0 {
+					log.Printf("coerce to D: %s: couldn't find extended field %s in %s", text, opr, args)
 					opr = "D"
 				}
 			case "bm":
@@ -393,6 +464,12 @@
 					opr = n // xx[5] || xx[0:4]
 				}
 			case "SI", "SIM", "TE":
+				if ispfx {
+					typ = asm.TypeImmSigned
+					opr = "si0"
+					opr2 = "si1"
+					break
+				}
 				typ = asm.TypeImmSigned
 				if i := args.Find(opr); i < 0 {
 					opr = "D"
@@ -414,6 +491,12 @@
 				typ = asm.TypeOffset
 				shift = 4
 			case "D":
+				if ispfx {
+					typ = asm.TypeOffset
+					opr = "d0"
+					opr2 = "d1"
+					break
+				}
 				if i := args.Find(opr); i >= 0 {
 					typ = asm.TypeOffset
 					break
@@ -475,6 +558,7 @@
 
 			case "VRA", "VRB", "VRC", "VRS", "VRT":
 				typ = asm.TypeVecReg
+
 			case "SPR", "DCRN", "BHRBE", "TBR", "SR", "TMR", "PMRN": // Note: if you add to this list and the register field needs special handling, add it to switch statement below
 				typ = asm.TypeSpReg
 				switch opr {
@@ -496,28 +580,28 @@
 				b0 := args.Find(opr)
 				b1 := args.Find(opr2)
 				b2 := args.Find(opr3)
-				f1.Offs, f1.Bits = uint8(args[b0].Offs), uint8(args[b0].Bits)
-				f2.Offs, f2.Bits = uint8(args[b1].Offs), uint8(args[b1].Bits)
-				f3.Offs, f3.Bits = uint8(args[b2].Offs), uint8(args[b2].Bits)
+				f1.Offs, f1.Bits, f1.Word = uint8(args[b0].Offs), uint8(args[b0].Bits), uint8(args[b0].Word)
+				f2.Offs, f2.Bits, f2.Word = uint8(args[b1].Offs), uint8(args[b1].Bits), uint8(args[b1].Word)
+				f3.Offs, f3.Bits, f3.Word = uint8(args[b2].Offs), uint8(args[b2].Bits), uint8(args[b2].Word)
 
 			case opr2 != "":
 				ext := args.Find(opr)
 				if ext < 0 {
 					log.Fatalf("%s: couldn't find extended field %s in %s", text, opr, args)
 				}
-				f1.Offs, f1.Bits = uint8(args[ext].Offs), uint8(args[ext].Bits)
+				f1.Offs, f1.Bits, f1.Word = uint8(args[ext].Offs), uint8(args[ext].Bits), uint8(args[ext].Word)
 				base := args.Find(opr2)
 				if base < 0 {
 					log.Fatalf("%s: couldn't find base field %s in %s", text, opr2, args)
 				}
-				f2.Offs, f2.Bits = uint8(args[base].Offs), uint8(args[base].Bits)
+				f2.Offs, f2.Bits, f2.Word = uint8(args[base].Offs), uint8(args[base].Bits), uint8(args[base].Word)
 			case opr == "mb", opr == "me": // xx[5] || xx[0:4]
 				i := args.Find(opr)
 				if i < 0 {
 					log.Fatalf("%s: couldn't find special 'm[be]' field for %s in %s", text, opr, args)
 				}
-				f1.Offs, f1.Bits = uint8(args[i].Offs+args[i].Bits)-1, 1
-				f2.Offs, f2.Bits = uint8(args[i].Offs), uint8(args[i].Bits)-1
+				f1.Offs, f1.Bits, f1.Word = uint8(args[i].Offs+args[i].Bits)-1, 1, uint8(args[i].Word)
+				f2.Offs, f2.Bits, f2.Word = uint8(args[i].Offs), uint8(args[i].Bits)-1, uint8(args[i].Word)
 			case opr == "spr", opr == "tbr", opr == "tmr", opr == "dcr": // spr[5:9] || spr[0:4]
 				i := args.Find(opr)
 				if i < 0 {
@@ -526,14 +610,14 @@
 				if args[i].Bits != 10 {
 					log.Fatalf("%s: special 'spr' field is not 10-bit: %s", text, args)
 				}
-				f1.Offs, f1.Bits = uint8(args[i].Offs)+5, 5
-				f2.Offs, f2.Bits = uint8(args[i].Offs), 5
+				f1.Offs, f1.Bits, f1.Word = uint8(args[i].Offs)+5, 5, uint8(args[i].Word)
+				f2.Offs, f2.Bits, f2.Word = uint8(args[i].Offs), 5, uint8(args[i].Word)
 			default:
 				i := args.Find(opr)
 				if i < 0 {
 					log.Fatalf("%s: couldn't find %s in %s", text, opr, args)
 				}
-				f1.Offs, f1.Bits = uint8(args[i].Offs), uint8(args[i].Bits)
+				f1.Offs, f1.Bits, f1.Word = uint8(args[i].Offs), uint8(args[i].Bits), uint8(args[i].Word)
 			}
 			field.BitFields.Append(f1)
 			if f2.Bits > 0 {
@@ -598,7 +682,7 @@
 func argFieldName(f Field) string {
 	ns := []string{"ap", f.Type.String()}
 	for _, b := range f.BitFields {
-		ns = append(ns, fmt.Sprintf("%d_%d", b.Offs, b.Offs+b.Bits-1))
+		ns = append(ns, fmt.Sprintf("%d_%d", b.Word*32+b.Offs, b.Word*32+b.Offs+b.Bits-1))
 	}
 	if f.Shift > 0 {
 		ns = append(ns, fmt.Sprintf("shift%d", f.Shift))
@@ -657,7 +741,7 @@
 			m[name] = true
 			fmt.Fprintf(&buf, "\t%s = &argField{Type: %#v, Shift: %d, BitFields: BitFields{", name, f.Type, f.Shift)
 			for _, b := range f.BitFields {
-				fmt.Fprintf(&buf, "{%d, %d},", b.Offs, b.Bits)
+				fmt.Fprintf(&buf, "{%d, %d, %d},", b.Offs, b.Bits, b.Word)
 			}
 			fmt.Fprintf(&buf, "}}\n")
 		}
@@ -668,7 +752,7 @@
 	fmt.Fprintf(&buf, "var instFormats = [...]instFormat{\n")
 	for _, inst := range p.Insts {
 		fmt.Fprintf(&buf, "\t{ %s, %#x, %#x, %#x,", opName(inst.Op), inst.Mask, inst.Value, inst.DontCare)
-		fmt.Fprintf(&buf, " // %s (%s)\n\t\t[5]*argField{", inst.Text, inst.Encoding)
+		fmt.Fprintf(&buf, " // %s (%s)\n\t\t[6]*argField{", inst.Text, inst.Encoding)
 		for _, f := range inst.Fields {
 			fmt.Fprintf(&buf, "%s, ", argFieldName(f))
 		}