x86/x86avxgen: enable AVX512 encoder tables generation

Now generates both VEX and EVEX encoded optabs.

Encoder based on these optabs passes tests added in
https://golang.org/cl/107217.

This version uses XED datafiles directly instead of x86.csv.

Also moves x86/x86spec/xeddata package to x86/xeddata to make it
usable from x86 packages.
Ported x86spec pattern set type to xeddata.

Updates golang/go#22779

Change-Id: I304267d888dcda4f776d1241efa524f397a8b7b3
Reviewed-on: https://go-review.googlesource.com/107216
Run-TryBot: Iskander Sharipov <iskander.sharipov@intel.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
diff --git a/x86/x86avxgen/avxgen.go b/x86/x86avxgen/avxgen.go
deleted file mode 100644
index bd3b40a..0000000
--- a/x86/x86avxgen/avxgen.go
+++ /dev/null
@@ -1,151 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-import (
-	"strings"
-
-	"golang.org/x/arch/x86/x86csv"
-)
-
-// An encoding is the parsed x86csv.Inst Encoding.
-type encoding struct {
-	vex     string // Dot separated VEX prefix. e.g. "VEX.NDD.256.66.0F.WIG"
-	opbyte  string // Single opcode encoding byte (example: "38")
-	opdigit string // "/digit" byte that extends the opcode (example: "7" for /7)
-}
-
-// parseEncoding parses x86csv.Inst Encoding.
-func parseEncoding(encString string) encoding {
-	f := strings.Fields(encString)
-	enc := encoding{
-		vex:    f[0],
-		opbyte: f[1],
-	}
-
-	// Parse rest parts.
-	// Currently interested only in "/digit" byte,
-	// but that may change later.
-	for _, p := range f[2:] {
-		switch p {
-		case "/r", "/is4":
-			// Currently not handled.
-
-		case "/0", "/1", "/2", "/3", "/4", "/5", "/6", "/7":
-			enc.opdigit = p[len("/"):]
-		}
-	}
-
-	return enc
-}
-
-// ytabID is a name of "x86/asm6.go" ytab table object.
-//
-// ytabMap contains all IDs that can be referenced
-// from generated Optabs.
-type ytabID string
-
-// optab holds data that is required to emit x86 optab entry.
-//
-// That is, it is not "the optab" itself, but a set
-// of parameters required to expand a template.
-//
-// Terminology differences:
-// x86csv   | asm6.go
-// ------------------
-// opcode   | as
-// encoding | op
-// ------------------
-// We use asm6.go terminology only in description of this structure,
-// as it describes asm6.go object.
-type optab struct {
-	// Prefix is fixed to "Pvex" right now.
-	// This may change when EVEX-encoded instructions
-	// generation is supported.
-
-	as     string   // AXXX constant name without leading "A" (example: ADD for AADD)
-	ytabID ytabID   // ytab table name (example: yvex_y2)
-	op     []string // Encoding parts
-}
-
-// doGroups groups instructions in insts by Go name and then calls
-// f for each different name, passing the name and the instructions
-// using that name. The calls are made ordered by first appearance
-// of name in insts, and the list of instructions for a given name
-// are in the same order as in insts.
-func doGroups(insts []*x86csv.Inst, f func(string, []*x86csv.Inst)) {
-	var opcodes []string
-	groups := make(map[string][]*x86csv.Inst)
-	for _, inst := range insts {
-		op := inst.GoOpcode()
-		if groups[op] == nil {
-			opcodes = append(opcodes, op)
-		}
-		groups[op] = append(groups[op], inst)
-	}
-	for _, op := range opcodes {
-		f(op, groups[op])
-	}
-}
-
-// argsNormalizer is used to transform Intel manual style args (operands)
-// to shorter form. Compact form is used in compound keys (see ytabMap).
-//
-// asm6.go (x86 asm backend) does not care about:
-// - memory operand sizes. There are distinct instructions for different sizes.
-// - register indexes. "xmm1" or "xmm" - does not matter.
-var argsNormalizer = strings.NewReplacer(
-	", ", ",",
-	" ", "",
-
-	"imm8", "i8",
-
-	"m8", "m",
-	"m16", "m",
-	"m32", "m",
-	"m64", "m",
-	"m128", "m",
-	"m256", "m",
-
-	"r32", "r",
-	"r64", "r",
-
-	"xmm1", "x",
-	"xmm2", "x",
-	"xmm3", "x",
-	"xmm", "x",
-
-	"ymm1", "y",
-	"ymm2", "y",
-	"ymm3", "y",
-	"ymm", "y",
-)
-
-// ytabKey computes a key describing the operand forms from insts for ytabMap.
-// This lets us find instructions with the same groups of forms and
-// have them share a ytab entry.
-func ytabKey(op string, insts []*x86csv.Inst) string {
-	var all []string
-	for _, inst := range insts {
-		form := argsNormalizer.Replace(inst.Go[len(op):])
-		all = append(all, form)
-	}
-	return strings.Join(all, ";")
-}
-
-// vexExpr returns the Go expression describing the VEX prefix.
-//
-// Examples:
-//   "VEX.NDS.256.0F.WIG" => "vexNDS|vex256|vex0F|vexWIG"
-//   "VEX.256.0F.WIG"     => "vexNOVSR|vex256|vex0F|vexWIG"
-func vexExpr(vex string) string {
-	expr := strings.Replace(vex, ".", "|vex", -1)[len("VEX|"):]
-	for _, p := range [...]string{"vexNDS", "vexNDD", "vexDDS"} {
-		if strings.HasPrefix(expr, p) {
-			return expr
-		}
-	}
-	return "vexNOVSR|" + expr
-}
diff --git a/x86/x86avxgen/avxgen_test.go b/x86/x86avxgen/avxgen_test.go
index bbddf67..e2f5b24 100644
--- a/x86/x86avxgen/avxgen_test.go
+++ b/x86/x86avxgen/avxgen_test.go
@@ -1,4 +1,4 @@
-// Copyright 2017 The Go Authors. All rights reserved.
+// Copyright 2018 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
@@ -6,371 +6,908 @@
 
 import (
 	"bytes"
+	"io/ioutil"
+	"path/filepath"
 	"regexp"
 	"strings"
+	"sync"
 	"testing"
 
-	"golang.org/x/arch/x86/x86csv"
+	"golang.org/x/arch/x86/xeddata"
 )
 
-func TestYtabKey(t *testing.T) {
-	type testCase struct {
-		insts []*x86csv.Inst
-		key   string
-	}
-	test := func(key string, goSyntaxes ...string) testCase {
-		insts := make([]*x86csv.Inst, len(goSyntaxes))
-		for i, stx := range goSyntaxes {
-			insts[i] = &x86csv.Inst{Go: stx}
-		}
-		return testCase{insts: insts, key: key}
-	}
-	tests := []testCase{
-		test("", "VZEROALL"),
-		test("i8,x/m,x", "VAESKEYGENASSIST imm8, xmm2/m128, xmm1"),
-		test("x/m,xV,x;y/m,yV,y", "VADDPD xmm2/m128, xmmV, xmm1", "VADDPD ymm2/m256, ymmV, ymm1"),
-		test("x/m,xV,x", "VADDSD xmm2/m64, xmmV, xmm1"),
-		test("x/m,x", "VAESIMC xmm2/m128, xmm1"),
-		test("r/m,i8;r/m,i8;r/m,i8;r/m,i8", "XOR r/m16, imm8", "XOR r/m32, imm8", "XOR r/m64, imm8", "XOR r/m8, imm8"),
-		test("xV,yV", "OP1 xmm1V, ymm2V "),
-		test("x,y", "OP2  xmm, ymm"),
-		test("x/m,r/m", "OP3 xmm3/m32,r/m32"),
-	}
-	for _, test := range tests {
-		op := test.insts[0].GoOpcode()
-		key := ytabKey(op, test.insts)
-		if key != test.key {
-			t.Errorf("ytabKey(%s, ...)\nwant: '%s'\nhave: '%s'", op, key, test.key)
-		}
-	}
-}
-
-func TestVexExpr(t *testing.T) {
-	tests := map[string]string{
-		"VEX.NDS.256.0F.WIG":    "vexNDS|vex256|vex0F|vexWIG",
-		"VEX.256.66.0F.WIG":     "vexNOVSR|vex256|vex66|vex0F|vexWIG",
-		"VEX.128.66.0F38.WIG":   "vexNOVSR|vex128|vex66|vex0F38|vexWIG",
-		"VEX.NDS.LIG.F2.0F.WIG": "vexNDS|vexLIG|vexF2|vex0F|vexWIG",
-		"VEX.NDD.LIG.F2.0F.WIG": "vexNDD|vexLIG|vexF2|vex0F|vexWIG",
-		"VEX.DDS.LIG.F2.0F.WIG": "vexDDS|vexLIG|vexF2|vex0F|vexWIG",
-		"VEX.NDS.0F":            "vexNDS|vex0F",
-		"VEX.0F":                "vexNOVSR|vex0F",
-		"VEX.0F.W0":             "vexNOVSR|vex0F|vexW0",
-		"VEX.66.0F.W1":          "vexNOVSR|vex66|vex0F|vexW1",
-	}
-
-	for input, want := range tests {
-		have := vexExpr(input)
-		if have != want {
-			t.Errorf("vexPrefixExpr(%q)\nwant: %#v\nhave: %#v", input, want, have)
-		}
-	}
-}
-
-func TestParseEncoding(t *testing.T) {
-	tests := map[string]encoding{
-		"VEX.LZ.0F38.W0 F2 /r": {
-			vex:    "VEX.LZ.0F38.W0",
-			opbyte: "F2",
-		},
-		"VEX.NDD.LZ.0F38.W0 F3 /1": {
-			vex:     "VEX.NDD.LZ.0F38.W0",
-			opbyte:  "F3",
-			opdigit: "1",
-		},
-		"VEX.NDS.128.66.0F3A.W0 4B /r /is4": {
-			vex:    "VEX.NDS.128.66.0F3A.W0",
-			opbyte: "4B",
-		},
-	}
-
-	for input, want := range tests {
-		have := parseEncoding(input)
-		if have != want {
-			t.Errorf("vexPrefixExpr(%q)\nwant: %#v\nhave: %#v", input, want, have)
-		}
-	}
-}
-
-func TestGenerateAenum(t *testing.T) {
-	input := `// Code generated by x86avxgen; DO NOT EDIT.
-package eax
-
-const (
-	unrelatedOne = iota
-	unrelatedTwo
-)
-
-/*
-Leading multiline comment;
-Line 2.
-*/
-//go:generate go run ../stringer.go -i $GOFILE -o anames.go -p x86
-const (
-	AAAA = iota + 4*iota // AAAA comment.
-	ACCC                 // ACCC comment.
-	ABBB
-	AFFF  // AFFF comment.
-	ALAST // ALAST comment.
-)
-
-// Top-level floating comment.
-`
-
-	expected := `// Code generated by x86avxgen; DO NOT EDIT.
-package eax
-
-const (
-	unrelatedOne = iota
-	unrelatedTwo
-)
-
-// Top-level floating comment.
-
-/*
-Leading multiline comment;
-Line 2.
-*/
-//go:generate go run ../stringer.go -i $GOFILE -o anames.go -p x86
-const (
-	AAAA = iota + 4*iota // AAAA comment.
-	ABBB
-	ACCC // ACCC comment.
-	ADDD
-	AEEE
-	AFFF // AFFF comment.
-	AZZZ
-	ALAST // ALAST comment.
-)
-`
-	r := strings.NewReader(input)
-	var buf bytes.Buffer
-	err := generateAenum(r, &buf, []string{
-		"ZZZ",
-		"EEE",
-		"DDD",
-	})
+func newTestContext(t testing.TB) *context {
+	ctx := &context{xedPath: filepath.Join("testdata", "xedpath")}
+	db, err := xeddata.NewDatabase(ctx.xedPath)
 	if err != nil {
-		t.Fatal(err)
+		t.Fatalf("open test database: %v", err)
 	}
-	output := buf.String()
-	if expected != output {
-		t.Errorf("output mismatch:\nwant: %s\nhave: %s",
-			expected, output)
-	}
+	ctx.db = db
+	return ctx
 }
 
-func TestUncommentTestLine(t *testing.T) {
-	// Note that is should also fix XMM0 to X0.
-	input := `
-	//TODO: ADCXL (BX), DX                  // 660f38f613
-	//TODO: ADCXL (R11), DX                 // 66410f38f613
-	//TODO: ADDSUBPD (BX), X2               // 660fd013
-	//TODO: BLENDVPD XMM0, (BX), X2         // 660f381513`
-	want := `
-	ADCXL (BX), DX                          // 660f38f613
-	ADCXL (R11), DX                         // 66410f38f613
-	ADDSUBPD (BX), X2                       // 660fd013
-	BLENDVPD X0, (BX), X2                   // 660f381513`
+func newStringSet(keys ...string) map[string]bool {
+	set := make(map[string]bool)
+	for _, k := range keys {
+		set[k] = true
+	}
+	return set
+}
 
-	lines := strings.Split(input, "\n")
-	for i := range lines {
-		if len(lines[i]) > 10 {
-			lines[i] = uncommentedTestLine(lines[i])
-		}
-	}
-	have := strings.Join(lines, "\n")
-	if want != have {
-		t.Errorf("output mismatch:\nwant: `%s`\nhave: `%s`",
-			want, have)
-	}
+func generateToString(t *testing.T) string {
+	ctx := newTestContext(t)
+	buildTables(ctx)
+	var buf bytes.Buffer
+	writeTables(&buf, ctx)
+	return buf.String()
 }
 
 func TestOutput(t *testing.T) {
-	// Using already existing AVX optabs to check generated output.
-	// This does not cover new instructions though.
+	// Ytab lists and optabs output checks.
+	//
+	// These tests are very fragile.
+	// Slight changes can invalidate them.
+	// It is better to keep testCases count at the minimum.
 
-	// These lines can be retrieved by:
-	// $ grep ', Pvex,' src/cmd/internal/obj/x86/asm6.go
-	existingOptabs := `
-	{AANDNL, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W0, 0xF2}},
-	{AANDNQ, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W1, 0xF2}},
-	{ABEXTRL, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W0, 0xF7}},
-	{ABEXTRQ, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W1, 0xF7}},
-	{ABZHIL, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W0, 0xF5}},
-	{ABZHIQ, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_0F38_W1, 0xF5}},
-	{AMULXL, yvex_r3, Pvex, [23]uint8{VEX_NDD_LZ_F2_0F38_W0, 0xF6}},
-	{AMULXQ, yvex_r3, Pvex, [23]uint8{VEX_NDD_LZ_F2_0F38_W1, 0xF6}},
-	{APDEPL, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_F2_0F38_W0, 0xF5}},
-	{APDEPQ, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_F2_0F38_W1, 0xF5}},
-	{APEXTL, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_F3_0F38_W0, 0xF5}},
-	{APEXTQ, yvex_r3, Pvex, [23]uint8{VEX_NDS_LZ_F3_0F38_W1, 0xF5}},
-	{ASARXL, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_F3_0F38_W0, 0xF7}},
-	{ASARXQ, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_F3_0F38_W1, 0xF7}},
-	{ASHLXL, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_66_0F38_W0, 0xF7}},
-	{ASHLXQ, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_66_0F38_W1, 0xF7}},
-	{ASHRXL, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_F2_0F38_W0, 0xF7}},
-	{ASHRXQ, yvex_vmr3, Pvex, [23]uint8{VEX_NDS_LZ_F2_0F38_W1, 0xF7}},
-	{AVMOVDQU, yvex_vmovdqa, Pvex, [23]uint8{VEX_NOVSR_128_F3_0F_WIG, 0x6F, VEX_NOVSR_128_F3_0F_WIG, 0x7F, VEX_NOVSR_256_F3_0F_WIG, 0x6F, VEX_NOVSR_256_F3_0F_WIG, 0x7F}},
-	{AVMOVDQA, yvex_vmovdqa, Pvex, [23]uint8{VEX_NOVSR_128_66_0F_WIG, 0x6F, VEX_NOVSR_128_66_0F_WIG, 0x7F, VEX_NOVSR_256_66_0F_WIG, 0x6F, VEX_NOVSR_256_66_0F_WIG, 0x7F}},
-	{AVMOVNTDQ, yvex_vmovntdq, Pvex, [23]uint8{VEX_NOVSR_128_66_0F_WIG, 0xE7, VEX_NOVSR_256_66_0F_WIG, 0xE7}},
-	{AVPCMPEQB, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0x74, VEX_NDS_256_66_0F_WIG, 0x74}},
-	{AVPXOR, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0xEF, VEX_NDS_256_66_0F_WIG, 0xEF}},
-	{AVPMOVMSKB, yvex_xyr2, Pvex, [23]uint8{VEX_NOVSR_128_66_0F_WIG, 0xD7, VEX_NOVSR_256_66_0F_WIG, 0xD7}},
-	{AVPAND, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0xDB, VEX_NDS_256_66_0F_WIG, 0xDB}},
-	{AVPBROADCASTB, yvex_vpbroadcast, Pvex, [23]uint8{VEX_NOVSR_128_66_0F38_W0, 0x78, VEX_NOVSR_256_66_0F38_W0, 0x78}},
-	{AVPTEST, yvex_xy2, Pvex, [23]uint8{VEX_NOVSR_128_66_0F38_WIG, 0x17, VEX_NOVSR_256_66_0F38_WIG, 0x17}},
-	{AVPSHUFB, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F38_WIG, 0x00, VEX_NDS_256_66_0F38_WIG, 0x00}},
-	{AVPSHUFD, yvex_xyi3, Pvex, [23]uint8{VEX_NOVSR_128_66_0F_WIG, 0x70, VEX_NOVSR_256_66_0F_WIG, 0x70, VEX_NOVSR_128_66_0F_WIG, 0x70, VEX_NOVSR_256_66_0F_WIG, 0x70}},
-	{AVPOR, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0xeb, VEX_NDS_256_66_0F_WIG, 0xeb}},
-	{AVPADDQ, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0xd4, VEX_NDS_256_66_0F_WIG, 0xd4}},
-	{AVPADDD, yvex_xy3, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0xfe, VEX_NDS_256_66_0F_WIG, 0xfe}},
-	{AVADDSD, yvex_x3, Pvex, [23]uint8{VEX_NDS_128_F2_0F_WIG, 0x58}},
-	{AVSUBSD, yvex_x3, Pvex, [23]uint8{VEX_NDS_128_F2_0F_WIG, 0x5c}},
-	{AVFMADD213SD, yvex_x3, Pvex, [23]uint8{VEX_DDS_LIG_66_0F38_W1, 0xa9}},
-	{AVFMADD231SD, yvex_x3, Pvex, [23]uint8{VEX_DDS_LIG_66_0F38_W1, 0xb9}},
-	{AVFNMADD213SD, yvex_x3, Pvex, [23]uint8{VEX_DDS_LIG_66_0F38_W1, 0xad}},
-	{AVFNMADD231SD, yvex_x3, Pvex, [23]uint8{VEX_DDS_LIG_66_0F38_W1, 0xbd}},
-	{AVPSLLD, yvex_shift, Pvex, [23]uint8{VEX_NDS_128_66_0F_WIG, 0x72, 0xf0, VEX_NDS_256_66_0F_WIG, 0x72, 0xf0, VEX_NDD_128_66_0F_WIG, 0xf2, VEX_NDD_256_66_0F_WIG, 0xf2}},
-	{AVPSLLQ, yvex_shift, Pvex, [23]uint8{VEX_NDD_128_66_0F_WIG, 0x73, 0xf0, VEX_NDD_256_66_0F_WIG, 0x73, 0xf0, VEX_NDS_128_66_0F_WIG, 0xf3, VEX_NDS_256_66_0F_WIG, 0xf3}},
-	{AVPSRLD, yvex_shift, Pvex, [23]uint8{VEX_NDD_128_66_0F_WIG, 0x72, 0xd0, VEX_NDD_256_66_0F_WIG, 0x72, 0xd0, VEX_NDD_128_66_0F_WIG, 0xd2, VEX_NDD_256_66_0F_WIG, 0xd2}},
-	{AVPSRLQ, yvex_shift, Pvex, [23]uint8{VEX_NDD_128_66_0F_WIG, 0x73, 0xd0, VEX_NDD_256_66_0F_WIG, 0x73, 0xd0, VEX_NDS_128_66_0F_WIG, 0xd3, VEX_NDS_256_66_0F_WIG, 0xd3}},
-	{AVPSRLDQ, yvex_shift_dq, Pvex, [23]uint8{VEX_NDD_128_66_0F_WIG, 0x73, 0xd8, VEX_NDD_256_66_0F_WIG, 0x73, 0xd8}},
-	{AVPSLLDQ, yvex_shift_dq, Pvex, [23]uint8{VEX_NDD_128_66_0F_WIG, 0x73, 0xf8, VEX_NDD_256_66_0F_WIG, 0x73, 0xf8}},
-	{AVPERM2F128, yvex_yyi4, Pvex, [23]uint8{VEX_NDS_256_66_0F3A_W0, 0x06}},
-	{AVPALIGNR, yvex_yyi4, Pvex, [23]uint8{VEX_NDS_256_66_0F3A_WIG, 0x0f}},
-	{AVPBLENDD, yvex_yyi4, Pvex, [23]uint8{VEX_NDS_256_66_0F3A_WIG, 0x02}},
-	{AVINSERTI128, yvex_xyi4, Pvex, [23]uint8{VEX_NDS_256_66_0F3A_WIG, 0x38}},
-	{AVPERM2I128, yvex_yyi4, Pvex, [23]uint8{VEX_NDS_256_66_0F3A_WIG, 0x46}},
-	{ARORXL, yvex_ri3, Pvex, [23]uint8{VEX_NOVSR_LZ_F2_0F3A_W0, 0xf0}},
-	{ARORXQ, yvex_ri3, Pvex, [23]uint8{VEX_NOVSR_LZ_F2_0F3A_W1, 0xf0}},
-	{AVBROADCASTSD, yvex_vpbroadcast_sd, Pvex, [23]uint8{VEX_NOVSR_256_66_0F38_W0, 0x19}},
-	{AVBROADCASTSS, yvex_vpbroadcast, Pvex, [23]uint8{VEX_NOVSR_128_66_0F38_W0, 0x18, VEX_NOVSR_256_66_0F38_W0, 0x18}},
-	{AVMOVDDUP, yvex_xy2, Pvex, [23]uint8{VEX_NOVSR_128_F2_0F_WIG, 0x12, VEX_NOVSR_256_F2_0F_WIG, 0x12}},
-	{AVMOVSHDUP, yvex_xy2, Pvex, [23]uint8{VEX_NOVSR_128_F3_0F_WIG, 0x16, VEX_NOVSR_256_F3_0F_WIG, 0x16}},
-	{AVMOVSLDUP, yvex_xy2, Pvex, [23]uint8{VEX_NOVSR_128_F3_0F_WIG, 0x12, VEX_NOVSR_256_F3_0F_WIG, 0x12}},`
-
-	// Preprocess "existingOptabs" to make generated output comparable.
+	type testCase struct {
+		opcode     string
+		ytabs      string
+		optabLines string
+	}
+	var testCases []testCase
 	{
-		// 1. Inline prefix byte expressions.
-		//    $ egrep 'VEX_[_A-Z0-9]+\s*=' src/cmd/internal/obj/x86/asm6.go
-		prefixFixer := strings.NewReplacer(
-			"VEX_DDS_LIG_66_0F38_W1", "vexDDS | vexLIG | vex66 | vex0F38 | vexW1",
-			"VEX_NDD_128_66_0F_WIG", "vexNDD | vex128 | vex66 | vex0F | vexWIG",
-			"VEX_NDD_256_66_0F_WIG", "vexNDD | vex256 | vex66 | vex0F | vexWIG",
-			"VEX_NDD_LZ_F2_0F38_W0", "vexNDD | vexLZ | vexF2 | vex0F38 | vexW0",
-			"VEX_NDD_LZ_F2_0F38_W1", "vexNDD | vexLZ | vexF2 | vex0F38 | vexW1",
-			"VEX_NDS_128_66_0F_WIG", "vexNDS | vex128 | vex66 | vex0F | vexWIG",
-			"VEX_NDS_128_66_0F38_WIG", "vexNDS | vex128 | vex66 | vex0F38 | vexWIG",
-			"VEX_NDS_128_F2_0F_WIG", "vexNDS | vex128 | vexF2 | vex0F | vexWIG",
-			"VEX_NDS_256_66_0F_WIG", "vexNDS | vex256 | vex66 | vex0F | vexWIG",
-			"VEX_NDS_256_66_0F38_WIG", "vexNDS | vex256 | vex66 | vex0F38 | vexWIG",
-			"VEX_NDS_256_66_0F3A_W0", "vexNDS | vex256 | vex66 | vex0F3A | vexW0",
-			"VEX_NDS_256_66_0F3A_WIG", "vexNDS | vex256 | vex66 | vex0F3A | vexWIG",
-			"VEX_NDS_LZ_0F38_W0", "vexNDS | vexLZ | vex0F38 | vexW0",
-			"VEX_NDS_LZ_0F38_W1", "vexNDS | vexLZ | vex0F38 | vexW1",
-			"VEX_NDS_LZ_66_0F38_W0", "vexNDS | vexLZ | vex66 | vex0F38 | vexW0",
-			"VEX_NDS_LZ_66_0F38_W1", "vexNDS | vexLZ | vex66 | vex0F38 | vexW1",
-			"VEX_NDS_LZ_F2_0F38_W0", "vexNDS | vexLZ | vexF2 | vex0F38 | vexW0",
-			"VEX_NDS_LZ_F2_0F38_W1", "vexNDS | vexLZ | vexF2 | vex0F38 | vexW1",
-			"VEX_NDS_LZ_F3_0F38_W0", "vexNDS | vexLZ | vexF3 | vex0F38 | vexW0",
-			"VEX_NDS_LZ_F3_0F38_W1", "vexNDS | vexLZ | vexF3 | vex0F38 | vexW1",
-			"VEX_NOVSR_128_66_0F_WIG", "vexNOVSR | vex128 | vex66 | vex0F | vexWIG",
-			"VEX_NOVSR_128_66_0F38_W0", "vexNOVSR | vex128 | vex66 | vex0F38 | vexW0",
-			"VEX_NOVSR_128_66_0F38_WIG", "vexNOVSR | vex128 | vex66 | vex0F38 | vexWIG",
-			"VEX_NOVSR_128_F2_0F_WIG", "vexNOVSR | vex128 | vexF2 | vex0F | vexWIG",
-			"VEX_NOVSR_128_F3_0F_WIG", "vexNOVSR | vex128 | vexF3 | vex0F | vexWIG",
-			"VEX_NOVSR_256_66_0F_WIG", "vexNOVSR | vex256 | vex66 | vex0F | vexWIG",
-			"VEX_NOVSR_256_66_0F38_W0", "vexNOVSR | vex256 | vex66 | vex0F38 | vexW0",
-			"VEX_NOVSR_256_66_0F38_WIG", "vexNOVSR | vex256 | vex66 | vex0F38 | vexWIG",
-			"VEX_NOVSR_256_F2_0F_WIG", "vexNOVSR | vex256 | vexF2 | vex0F | vexWIG",
-			"VEX_NOVSR_256_F3_0F_WIG", "vexNOVSR | vex256 | vexF3 | vex0F | vexWIG",
-			"VEX_NOVSR_LZ_F2_0F3A_W0", "vexNOVSR | vexLZ | vexF2 | vex0F3A | vexW0",
-			"VEX_NOVSR_LZ_F2_0F3A_W1", "vexNOVSR | vexLZ | vexF2 | vex0F3A | vexW1",
-		)
-		existingOptabs = prefixFixer.Replace(existingOptabs)
-
-		// 2. Normalize hex literals.
-		//    Some optabs use 0xaa style, others use 0xAA.
-		//    Generated optabs always use upper case style (as in x86.csv).
-		rxHexLit := regexp.MustCompile(` 0x[0-9a-f]{2}`)
-		existingOptabs = rxHexLit.ReplaceAllStringFunc(existingOptabs, func(m string) string {
-			return " 0x" + strings.ToUpper(m[len(" 0x"):])
-		})
+		opcodeRE := regexp.MustCompile(`as: ([A-Z][A-Z0-9]*)`)
+		data, err := ioutil.ReadFile(filepath.Join("testdata", "golden.txt"))
+		if err != nil {
+			t.Fatalf("read golden file: %v", err)
+		}
+		for _, entry := range bytes.Split(data, []byte("======")) {
+			parts := bytes.Split(entry, []byte("----"))
+			ytabs := parts[0]
+			optabLines := parts[1]
+			opcode := opcodeRE.FindSubmatch(optabLines)[1]
+			testCases = append(testCases, testCase{
+				ytabs:      strings.TrimSpace(string(ytabs)),
+				optabLines: strings.TrimSpace(string(optabLines)),
+				opcode:     string(opcode)[len("A"):],
+			})
+		}
 	}
 
-	r, err := specRowReader("../" + specFile)
-	if err != nil {
-		t.Fatalf("open row reader: %v", err)
+	output := generateToString(t)
+	for _, tc := range testCases {
+		if !strings.Contains(output, tc.ytabs) {
+			t.Errorf("%s: ytabs not matched", tc.opcode)
+		}
+		if !strings.Contains(output, tc.optabLines) {
+			t.Errorf("%s: optab lines not matched", tc.opcode)
+		}
 	}
-	var newOptabs bytes.Buffer
-	_, err = doGenerateVexOptabs(r, &newOptabs)
-	if err != nil {
-		t.Fatalf("generate vex optabs: %v", err)
-	}
+}
 
-	rxOptabID := regexp.MustCompile(`[A-Z_][A-Z_0-9]*`)
-	linesToMap := func(lines []string) map[string]string {
-		m := make(map[string]string, len(lines))
-		for _, l := range lines {
-			name := rxOptabID.FindString(l)
-			if name != "" {
-				m[name] = l
+func TestOutputStability(t *testing.T) {
+	// Generate output count+1 times and check that every time
+	// it is exactly the same string.
+	//
+	// The output should be deterministic to avoid unwanted diffs
+	// between each code generation.
+	const count = 8
+
+	want := generateToString(t)
+	var wg sync.WaitGroup
+	for i := 0; i < count; i++ {
+		wg.Add(1)
+		go func(i int) {
+			if want != generateToString(t) {
+				t.Errorf("output #%d mismatches", i)
 			}
-		}
-		return m
+			wg.Done()
+		}(i)
+	}
+	wg.Wait()
+}
+
+func TestOpcodeCoverage(t *testing.T) {
+	// Check that generator produces all expected opcodes from testdata files.
+	// All opcodes are in Go syntax.
+
+	// VEX/EVEX opcodes collected from XED-based x86.csv.
+	expectedOpcodes := newStringSet(
+		"ANDNL",
+		"ANDNQ",
+		"BEXTRL",
+		"BEXTRQ",
+		"BLSIL",
+		"BLSIQ",
+		"BLSMSKL",
+		"BLSMSKQ",
+		"BLSRL",
+		"BLSRQ",
+		"BZHIL",
+		"BZHIQ",
+		"KADDB",
+		"KADDD",
+		"KADDQ",
+		"KADDW",
+		"KANDB",
+		"KANDD",
+		"KANDNB",
+		"KANDND",
+		"KANDNQ",
+		"KANDNW",
+		"KANDQ",
+		"KANDW",
+		"KMOVB",
+		"KMOVD",
+		"KMOVQ",
+		"KMOVW",
+		"KNOTB",
+		"KNOTD",
+		"KNOTQ",
+		"KNOTW",
+		"KORB",
+		"KORD",
+		"KORQ",
+		"KORTESTB",
+		"KORTESTD",
+		"KORTESTQ",
+		"KORTESTW",
+		"KORW",
+		"KSHIFTLB",
+		"KSHIFTLD",
+		"KSHIFTLQ",
+		"KSHIFTLW",
+		"KSHIFTRB",
+		"KSHIFTRD",
+		"KSHIFTRQ",
+		"KSHIFTRW",
+		"KTESTB",
+		"KTESTD",
+		"KTESTQ",
+		"KTESTW",
+		"KUNPCKBW",
+		"KUNPCKDQ",
+		"KUNPCKWD",
+		"KXNORB",
+		"KXNORD",
+		"KXNORQ",
+		"KXNORW",
+		"KXORB",
+		"KXORD",
+		"KXORQ",
+		"KXORW",
+		"MULXL",
+		"MULXQ",
+		"PDEPL",
+		"PDEPQ",
+		"PEXTL",
+		"PEXTQ",
+		"RORXL",
+		"RORXQ",
+		"SARXL",
+		"SARXQ",
+		"SHLXL",
+		"SHLXQ",
+		"SHRXL",
+		"SHRXQ",
+		"V4FMADDPS",
+		"V4FMADDSS",
+		"V4FNMADDPS",
+		"V4FNMADDSS",
+		"VADDPD",
+		"VADDPS",
+		"VADDSD",
+		"VADDSS",
+		"VADDSUBPD",
+		"VADDSUBPS",
+		"VAESDEC",
+		"VAESDECLAST",
+		"VAESENC",
+		"VAESENCLAST",
+		"VAESIMC",
+		"VAESKEYGENASSIST",
+		"VALIGND",
+		"VALIGNQ",
+		"VANDNPD",
+		"VANDNPS",
+		"VANDPD",
+		"VANDPS",
+		"VBLENDMPD",
+		"VBLENDMPS",
+		"VBLENDPD",
+		"VBLENDPS",
+		"VBLENDVPD",
+		"VBLENDVPS",
+		"VBROADCASTF128",
+		"VBROADCASTF32X2",
+		"VBROADCASTF32X4",
+		"VBROADCASTF32X8",
+		"VBROADCASTF64X2",
+		"VBROADCASTF64X4",
+		"VBROADCASTI128",
+		"VBROADCASTI32X2",
+		"VBROADCASTI32X4",
+		"VBROADCASTI32X8",
+		"VBROADCASTI64X2",
+		"VBROADCASTI64X4",
+		"VBROADCASTSD",
+		"VBROADCASTSS",
+		"VCMPPD",
+		"VCMPPS",
+		"VCMPSD",
+		"VCMPSS",
+		"VCOMISD",
+		"VCOMISS",
+		"VCOMPRESSPD",
+		"VCOMPRESSPS",
+		"VCVTDQ2PD",
+		"VCVTDQ2PS",
+		"VCVTPD2DQ",
+		"VCVTPD2DQX",
+		"VCVTPD2DQY",
+		"VCVTPD2PS",
+		"VCVTPD2PSX",
+		"VCVTPD2PSY",
+		"VCVTPD2QQ",
+		"VCVTPD2UDQ",
+		"VCVTPD2UDQX",
+		"VCVTPD2UDQY",
+		"VCVTPD2UQQ",
+		"VCVTPH2PS",
+		"VCVTPS2DQ",
+		"VCVTPS2PD",
+		"VCVTPS2PH",
+		"VCVTPS2QQ",
+		"VCVTPS2UDQ",
+		"VCVTPS2UQQ",
+		"VCVTQQ2PD",
+		"VCVTQQ2PS",
+		"VCVTQQ2PSX",
+		"VCVTQQ2PSY",
+		"VCVTSD2SI",
+		"VCVTSD2SIQ",
+		"VCVTSD2SS",
+		"VCVTSD2USIL",
+		"VCVTSD2USIQ",
+		"VCVTSI2SDL",
+		"VCVTSI2SDQ",
+		"VCVTSI2SSL",
+		"VCVTSI2SSQ",
+		"VCVTSS2SD",
+		"VCVTSS2SI",
+		"VCVTSS2SIQ",
+		"VCVTSS2USIL",
+		"VCVTSS2USIQ",
+		"VCVTTPD2DQ",
+		"VCVTTPD2DQX",
+		"VCVTTPD2DQY",
+		"VCVTTPD2QQ",
+		"VCVTTPD2UDQ",
+		"VCVTTPD2UDQX",
+		"VCVTTPD2UDQY",
+		"VCVTTPD2UQQ",
+		"VCVTTPS2DQ",
+		"VCVTTPS2QQ",
+		"VCVTTPS2UDQ",
+		"VCVTTPS2UQQ",
+		"VCVTTSD2SI",
+		"VCVTTSD2SIQ",
+		"VCVTTSD2USIL",
+		"VCVTTSD2USIQ",
+		"VCVTTSS2SI",
+		"VCVTTSS2SIQ",
+		"VCVTTSS2USIL",
+		"VCVTTSS2USIQ",
+		"VCVTUDQ2PD",
+		"VCVTUDQ2PS",
+		"VCVTUQQ2PD",
+		"VCVTUQQ2PS",
+		"VCVTUQQ2PSX",
+		"VCVTUQQ2PSY",
+		"VCVTUSI2SDL",
+		"VCVTUSI2SDQ",
+		"VCVTUSI2SSL",
+		"VCVTUSI2SSQ",
+		"VDBPSADBW",
+		"VDIVPD",
+		"VDIVPS",
+		"VDIVSD",
+		"VDIVSS",
+		"VDPPD",
+		"VDPPS",
+		"VEXP2PD",
+		"VEXP2PS",
+		"VEXPANDPD",
+		"VEXPANDPS",
+		"VEXTRACTF128",
+		"VEXTRACTF32X4",
+		"VEXTRACTF32X8",
+		"VEXTRACTF64X2",
+		"VEXTRACTF64X4",
+		"VEXTRACTI128",
+		"VEXTRACTI32X4",
+		"VEXTRACTI32X8",
+		"VEXTRACTI64X2",
+		"VEXTRACTI64X4",
+		"VEXTRACTPS",
+		"VFIXUPIMMPD",
+		"VFIXUPIMMPS",
+		"VFIXUPIMMSD",
+		"VFIXUPIMMSS",
+		"VFMADD132PD",
+		"VFMADD132PS",
+		"VFMADD132SD",
+		"VFMADD132SS",
+		"VFMADD213PD",
+		"VFMADD213PS",
+		"VFMADD213SD",
+		"VFMADD213SS",
+		"VFMADD231PD",
+		"VFMADD231PS",
+		"VFMADD231SD",
+		"VFMADD231SS",
+		"VFMADDPD",
+		"VFMADDPS",
+		"VFMADDSD",
+		"VFMADDSS",
+		"VFMADDSUB132PD",
+		"VFMADDSUB132PS",
+		"VFMADDSUB213PD",
+		"VFMADDSUB213PS",
+		"VFMADDSUB231PD",
+		"VFMADDSUB231PS",
+		"VFMADDSUBPD",
+		"VFMADDSUBPS",
+		"VFMSUB132PD",
+		"VFMSUB132PS",
+		"VFMSUB132SD",
+		"VFMSUB132SS",
+		"VFMSUB213PD",
+		"VFMSUB213PS",
+		"VFMSUB213SD",
+		"VFMSUB213SS",
+		"VFMSUB231PD",
+		"VFMSUB231PS",
+		"VFMSUB231SD",
+		"VFMSUB231SS",
+		"VFMSUBADD132PD",
+		"VFMSUBADD132PS",
+		"VFMSUBADD213PD",
+		"VFMSUBADD213PS",
+		"VFMSUBADD231PD",
+		"VFMSUBADD231PS",
+		"VFMSUBADDPD",
+		"VFMSUBADDPS",
+		"VFMSUBPD",
+		"VFMSUBPS",
+		"VFMSUBSD",
+		"VFMSUBSS",
+		"VFNMADD132PD",
+		"VFNMADD132PS",
+		"VFNMADD132SD",
+		"VFNMADD132SS",
+		"VFNMADD213PD",
+		"VFNMADD213PS",
+		"VFNMADD213SD",
+		"VFNMADD213SS",
+		"VFNMADD231PD",
+		"VFNMADD231PS",
+		"VFNMADD231SD",
+		"VFNMADD231SS",
+		"VFNMADDPD",
+		"VFNMADDPS",
+		"VFNMADDSD",
+		"VFNMADDSS",
+		"VFNMSUB132PD",
+		"VFNMSUB132PS",
+		"VFNMSUB132SD",
+		"VFNMSUB132SS",
+		"VFNMSUB213PD",
+		"VFNMSUB213PS",
+		"VFNMSUB213SD",
+		"VFNMSUB213SS",
+		"VFNMSUB231PD",
+		"VFNMSUB231PS",
+		"VFNMSUB231SD",
+		"VFNMSUB231SS",
+		"VFNMSUBPD",
+		"VFNMSUBPS",
+		"VFNMSUBSD",
+		"VFNMSUBSS",
+		"VFPCLASSPDX",
+		"VFPCLASSPDY",
+		"VFPCLASSPDZ",
+		"VFPCLASSPSX",
+		"VFPCLASSPSY",
+		"VFPCLASSPSZ",
+		"VFPCLASSSD",
+		"VFPCLASSSS",
+		"VGATHERDPD",
+		"VGATHERDPS",
+		"VGATHERPF0DPD",
+		"VGATHERPF0DPS",
+		"VGATHERPF0QPD",
+		"VGATHERPF0QPS",
+		"VGATHERPF1DPD",
+		"VGATHERPF1DPS",
+		"VGATHERPF1QPD",
+		"VGATHERPF1QPS",
+		"VGATHERQPD",
+		"VGATHERQPS",
+		"VGETEXPPD",
+		"VGETEXPPS",
+		"VGETEXPSD",
+		"VGETEXPSS",
+		"VGETMANTPD",
+		"VGETMANTPS",
+		"VGETMANTSD",
+		"VGETMANTSS",
+		"VGF2P8AFFINEINVQB",
+		"VGF2P8AFFINEQB",
+		"VGF2P8MULB",
+		"VHADDPD",
+		"VHADDPS",
+		"VHSUBPD",
+		"VHSUBPS",
+		"VINSERTF128",
+		"VINSERTF32X4",
+		"VINSERTF32X8",
+		"VINSERTF64X2",
+		"VINSERTF64X4",
+		"VINSERTI128",
+		"VINSERTI32X4",
+		"VINSERTI32X8",
+		"VINSERTI64X2",
+		"VINSERTI64X4",
+		"VINSERTPS",
+		"VLDDQU",
+		"VLDMXCSR",
+		"VMASKMOVDQU",
+		"VMASKMOVPD",
+		"VMASKMOVPS",
+		"VMAXPD",
+		"VMAXPS",
+		"VMAXSD",
+		"VMAXSS",
+		"VMINPD",
+		"VMINPS",
+		"VMINSD",
+		"VMINSS",
+		"VMOVAPD",
+		"VMOVAPS",
+		"VMOVD",
+		"VMOVDDUP",
+		"VMOVDQA",
+		"VMOVDQA32",
+		"VMOVDQA64",
+		"VMOVDQU",
+		"VMOVDQU16",
+		"VMOVDQU32",
+		"VMOVDQU64",
+		"VMOVDQU8",
+		"VMOVHLPS",
+		"VMOVHPD",
+		"VMOVHPS",
+		"VMOVLHPS",
+		"VMOVLPD",
+		"VMOVLPS",
+		"VMOVMSKPD",
+		"VMOVMSKPS",
+		"VMOVNTDQ",
+		"VMOVNTDQA",
+		"VMOVNTPD",
+		"VMOVNTPS",
+		"VMOVQ",
+		"VMOVSD",
+		"VMOVSHDUP",
+		"VMOVSLDUP",
+		"VMOVSS",
+		"VMOVUPD",
+		"VMOVUPS",
+		"VMPSADBW",
+		"VMULPD",
+		"VMULPS",
+		"VMULSD",
+		"VMULSS",
+		"VORPD",
+		"VORPS",
+		"VP4DPWSSD",
+		"VP4DPWSSDS",
+		"VPABSB",
+		"VPABSD",
+		"VPABSQ",
+		"VPABSW",
+		"VPACKSSDW",
+		"VPACKSSWB",
+		"VPACKUSDW",
+		"VPACKUSWB",
+		"VPADDB",
+		"VPADDD",
+		"VPADDQ",
+		"VPADDSB",
+		"VPADDSW",
+		"VPADDUSB",
+		"VPADDUSW",
+		"VPADDW",
+		"VPALIGNR",
+		"VPAND",
+		"VPANDD",
+		"VPANDN",
+		"VPANDND",
+		"VPANDNQ",
+		"VPANDQ",
+		"VPAVGB",
+		"VPAVGW",
+		"VPBLENDD",
+		"VPBLENDMB",
+		"VPBLENDMD",
+		"VPBLENDMQ",
+		"VPBLENDMW",
+		"VPBLENDVB",
+		"VPBLENDW",
+		"VPBROADCASTB",
+		"VPBROADCASTD",
+		"VPBROADCASTMB2Q",
+		"VPBROADCASTMW2D",
+		"VPBROADCASTQ",
+		"VPBROADCASTW",
+		"VPCLMULQDQ",
+		"VPCMPB",
+		"VPCMPD",
+		"VPCMPEQB",
+		"VPCMPEQD",
+		"VPCMPEQQ",
+		"VPCMPEQW",
+		"VPCMPESTRI",
+		"VPCMPESTRM",
+		"VPCMPGTB",
+		"VPCMPGTD",
+		"VPCMPGTQ",
+		"VPCMPGTW",
+		"VPCMPISTRI",
+		"VPCMPISTRM",
+		"VPCMPQ",
+		"VPCMPUB",
+		"VPCMPUD",
+		"VPCMPUQ",
+		"VPCMPUW",
+		"VPCMPW",
+		"VPCOMPRESSB",
+		"VPCOMPRESSD",
+		"VPCOMPRESSQ",
+		"VPCOMPRESSW",
+		"VPCONFLICTD",
+		"VPCONFLICTQ",
+		"VPDPBUSD",
+		"VPDPBUSDS",
+		"VPDPWSSD",
+		"VPDPWSSDS",
+		"VPERM2F128",
+		"VPERM2I128",
+		"VPERMB",
+		"VPERMD",
+		"VPERMI2B",
+		"VPERMI2D",
+		"VPERMI2PD",
+		"VPERMI2PS",
+		"VPERMI2Q",
+		"VPERMI2W",
+		"VPERMIL2PD",
+		"VPERMIL2PS",
+		"VPERMILPD",
+		"VPERMILPS",
+		"VPERMPD",
+		"VPERMPS",
+		"VPERMQ",
+		"VPERMT2B",
+		"VPERMT2D",
+		"VPERMT2PD",
+		"VPERMT2PS",
+		"VPERMT2Q",
+		"VPERMT2W",
+		"VPERMW",
+		"VPEXPANDB",
+		"VPEXPANDD",
+		"VPEXPANDQ",
+		"VPEXPANDW",
+		"VPEXTRB",
+		"VPEXTRD",
+		"VPEXTRQ",
+		"VPEXTRW",
+		"VPGATHERDD",
+		"VPGATHERDQ",
+		"VPGATHERQD",
+		"VPGATHERQQ",
+		"VPHADDD",
+		"VPHADDSW",
+		"VPHADDW",
+		"VPHMINPOSUW",
+		"VPHSUBD",
+		"VPHSUBSW",
+		"VPHSUBW",
+		"VPINSRB",
+		"VPINSRD",
+		"VPINSRQ",
+		"VPINSRW",
+		"VPLZCNTD",
+		"VPLZCNTQ",
+		"VPMADD52HUQ",
+		"VPMADD52LUQ",
+		"VPMADDUBSW",
+		"VPMADDWD",
+		"VPMASKMOVD",
+		"VPMASKMOVQ",
+		"VPMAXSB",
+		"VPMAXSD",
+		"VPMAXSQ",
+		"VPMAXSW",
+		"VPMAXUB",
+		"VPMAXUD",
+		"VPMAXUQ",
+		"VPMAXUW",
+		"VPMINSB",
+		"VPMINSD",
+		"VPMINSQ",
+		"VPMINSW",
+		"VPMINUB",
+		"VPMINUD",
+		"VPMINUQ",
+		"VPMINUW",
+		"VPMOVB2M",
+		"VPMOVD2M",
+		"VPMOVDB",
+		"VPMOVDW",
+		"VPMOVM2B",
+		"VPMOVM2D",
+		"VPMOVM2Q",
+		"VPMOVM2W",
+		"VPMOVMSKB",
+		"VPMOVQ2M",
+		"VPMOVQB",
+		"VPMOVQD",
+		"VPMOVQW",
+		"VPMOVSDB",
+		"VPMOVSDW",
+		"VPMOVSQB",
+		"VPMOVSQD",
+		"VPMOVSQW",
+		"VPMOVSWB",
+		"VPMOVSXBD",
+		"VPMOVSXBQ",
+		"VPMOVSXBW",
+		"VPMOVSXDQ",
+		"VPMOVSXWD",
+		"VPMOVSXWQ",
+		"VPMOVUSDB",
+		"VPMOVUSDW",
+		"VPMOVUSQB",
+		"VPMOVUSQD",
+		"VPMOVUSQW",
+		"VPMOVUSWB",
+		"VPMOVW2M",
+		"VPMOVWB",
+		"VPMOVZXBD",
+		"VPMOVZXBQ",
+		"VPMOVZXBW",
+		"VPMOVZXDQ",
+		"VPMOVZXWD",
+		"VPMOVZXWQ",
+		"VPMULDQ",
+		"VPMULHRSW",
+		"VPMULHUW",
+		"VPMULHW",
+		"VPMULLD",
+		"VPMULLQ",
+		"VPMULLW",
+		"VPMULTISHIFTQB",
+		"VPMULUDQ",
+		"VPOPCNTB",
+		"VPOPCNTD",
+		"VPOPCNTQ",
+		"VPOPCNTW",
+		"VPOR",
+		"VPORD",
+		"VPORQ",
+		"VPROLD",
+		"VPROLQ",
+		"VPROLVD",
+		"VPROLVQ",
+		"VPRORD",
+		"VPRORQ",
+		"VPRORVD",
+		"VPRORVQ",
+		"VPSADBW",
+		"VPSCATTERDD",
+		"VPSCATTERDQ",
+		"VPSCATTERQD",
+		"VPSCATTERQQ",
+		"VPSHLDD",
+		"VPSHLDQ",
+		"VPSHLDVD",
+		"VPSHLDVQ",
+		"VPSHLDVW",
+		"VPSHLDW",
+		"VPSHRDD",
+		"VPSHRDQ",
+		"VPSHRDVD",
+		"VPSHRDVQ",
+		"VPSHRDVW",
+		"VPSHRDW",
+		"VPSHUFB",
+		"VPSHUFBITQMB",
+		"VPSHUFD",
+		"VPSHUFHW",
+		"VPSHUFLW",
+		"VPSIGNB",
+		"VPSIGND",
+		"VPSIGNW",
+		"VPSLLD",
+		"VPSLLDQ",
+		"VPSLLQ",
+		"VPSLLVD",
+		"VPSLLVQ",
+		"VPSLLVW",
+		"VPSLLW",
+		"VPSRAD",
+		"VPSRAQ",
+		"VPSRAVD",
+		"VPSRAVQ",
+		"VPSRAVW",
+		"VPSRAW",
+		"VPSRLD",
+		"VPSRLDQ",
+		"VPSRLQ",
+		"VPSRLVD",
+		"VPSRLVQ",
+		"VPSRLVW",
+		"VPSRLW",
+		"VPSUBB",
+		"VPSUBD",
+		"VPSUBQ",
+		"VPSUBSB",
+		"VPSUBSW",
+		"VPSUBUSB",
+		"VPSUBUSW",
+		"VPSUBW",
+		"VPTERNLOGD",
+		"VPTERNLOGQ",
+		"VPTEST",
+		"VPTESTMB",
+		"VPTESTMD",
+		"VPTESTMQ",
+		"VPTESTMW",
+		"VPTESTNMB",
+		"VPTESTNMD",
+		"VPTESTNMQ",
+		"VPTESTNMW",
+		"VPUNPCKHBW",
+		"VPUNPCKHDQ",
+		"VPUNPCKHQDQ",
+		"VPUNPCKHWD",
+		"VPUNPCKLBW",
+		"VPUNPCKLDQ",
+		"VPUNPCKLQDQ",
+		"VPUNPCKLWD",
+		"VPXOR",
+		"VPXORD",
+		"VPXORQ",
+		"VRANGEPD",
+		"VRANGEPS",
+		"VRANGESD",
+		"VRANGESS",
+		"VRCP14PD",
+		"VRCP14PS",
+		"VRCP14SD",
+		"VRCP14SS",
+		"VRCP28PD",
+		"VRCP28PS",
+		"VRCP28SD",
+		"VRCP28SS",
+		"VRCPPS",
+		"VRCPSS",
+		"VREDUCEPD",
+		"VREDUCEPS",
+		"VREDUCESD",
+		"VREDUCESS",
+		"VRNDSCALEPD",
+		"VRNDSCALEPS",
+		"VRNDSCALESD",
+		"VRNDSCALESS",
+		"VROUNDPD",
+		"VROUNDPS",
+		"VROUNDSD",
+		"VROUNDSS",
+		"VRSQRT14PD",
+		"VRSQRT14PS",
+		"VRSQRT14SD",
+		"VRSQRT14SS",
+		"VRSQRT28PD",
+		"VRSQRT28PS",
+		"VRSQRT28SD",
+		"VRSQRT28SS",
+		"VRSQRTPS",
+		"VRSQRTSS",
+		"VSCALEFPD",
+		"VSCALEFPS",
+		"VSCALEFSD",
+		"VSCALEFSS",
+		"VSCATTERDPD",
+		"VSCATTERDPS",
+		"VSCATTERPF0DPD",
+		"VSCATTERPF0DPS",
+		"VSCATTERPF0QPD",
+		"VSCATTERPF0QPS",
+		"VSCATTERPF1DPD",
+		"VSCATTERPF1DPS",
+		"VSCATTERPF1QPD",
+		"VSCATTERPF1QPS",
+		"VSCATTERQPD",
+		"VSCATTERQPS",
+		"VSHUFF32X4",
+		"VSHUFF64X2",
+		"VSHUFI32X4",
+		"VSHUFI64X2",
+		"VSHUFPD",
+		"VSHUFPS",
+		"VSQRTPD",
+		"VSQRTPS",
+		"VSQRTSD",
+		"VSQRTSS",
+		"VSTMXCSR",
+		"VSUBPD",
+		"VSUBPS",
+		"VSUBSD",
+		"VSUBSS",
+		"VTESTPD",
+		"VTESTPS",
+		"VUCOMISD",
+		"VUCOMISS",
+		"VUNPCKHPD",
+		"VUNPCKHPS",
+		"VUNPCKLPD",
+		"VUNPCKLPS",
+		"VXORPD",
+		"VXORPS",
+		"VZEROALL",
+		"VZEROUPPER")
+
+	// AMD-specific VEX opcodes.
+	// Excluded from x86avxgen output for now.
+	amdOpcodes := newStringSet(
+		"VFMADDPD",
+		"VFMADDPS",
+		"VFMADDSD",
+		"VFMADDSS",
+		"VFMADDSUBPD",
+		"VFMADDSUBPS",
+		"VFMSUBADDPD",
+		"VFMSUBADDPS",
+		"VFMSUBPD",
+		"VFMSUBPS",
+		"VFMSUBSD",
+		"VFMSUBSS",
+		"VFNMADDPD",
+		"VFNMADDPS",
+		"VFNMADDSD",
+		"VFNMADDSS",
+		"VFNMSUBPD",
+		"VFNMSUBPS",
+		"VFNMSUBSD",
+		"VFNMSUBSS",
+		"VPERMIL2PD",
+		"VPERMIL2PS")
+
+	ctx := newTestContext(t)
+	buildTables(ctx)
+
+	for op := range amdOpcodes {
+		delete(expectedOpcodes, op)
+	}
+	for op := range ctx.optabs {
+		delete(expectedOpcodes, op)
 	}
 
-	expectedChanges := map[string]string{
-		// Before: 256/Y variant.
-		// Now:    256/Y + 128/X variants.
-		"AVPALIGNR": "{AVPALIGNR, yvex_vpalignr, Pvex, [23]uint8{vexNDS | vex128 | vex66 | vex0F3A | vexWIG, 0x0F, vexNDS | vex256 | vex66 | vex0F3A | vexWIG, 0x0F}}",
-		"AVPBLENDD": "{AVPBLENDD, yvex_vpalignr, Pvex, [23]uint8{vexNDS | vex128 | vex66 | vex0F3A | vexW0, 0x02, vexNDS | vex256 | vex66 | vex0F3A | vexW0, 0x02}}",
-
-		// Before: R+R variants.
-		// Now:    R+R and R+M variants.
-		"AVBROADCASTSS": "{AVBROADCASTSS, yvex_vpbroadcast_ss, Pvex, [23]uint8{vexNOVSR | vex128 | vex66 | vex0F38 | vexW0, 0x18, vexNOVSR | vex128 | vex66 | vex0F38 | vexW0, 0x18, vexNOVSR | vex256 | vex66 | vex0F38 | vexW0, 0x18, vexNOVSR | vex256 | vex66 | vex0F38 | vexW0, 0x18}}",
-		"AVBROADCASTSD": "{AVBROADCASTSD, yvex_vpbroadcast_sd, Pvex, [23]uint8{vexNOVSR | vex256 | vex66 | vex0F38 | vexW0, 0x19, vexNOVSR | vex256 | vex66 | vex0F38 | vexW0, 0x19}}",
-
-		// Before: VEX.L=128 (vex128).
-		// Now:    VEX.L=IGNORE (vexLIG); as in "x86.csv".
-		"AVSUBSD": "{AVSUBSD, yvex_x3, Pvex, [23]uint8{vexNDS | vexLIG | vexF2 | vex0F | vexWIG, 0x5C}}",
-		"AVADDSD": "{AVADDSD, yvex_x3, Pvex, [23]uint8{vexNDS | vexLIG | vexF2 | vex0F | vexWIG, 0x58}}",
-
-		// Before: VEX.W=IGNORE (vexWIG).
-		// Now:    VEX.W=W0 (vexW0); as in "x86.csv".
-		"AVINSERTI128": "{AVINSERTI128, yvex_xyi4, Pvex, [23]uint8{vexNDS | vex256 | vex66 | vex0F3A | vexW0, 0x38}}",
-		"AVPERM2I128":  "{AVPERM2I128, yvex_yyi4, Pvex, [23]uint8{vexNDS | vex256 | vex66 | vex0F3A | vexW0, 0x46}}",
-	}
-
-	reportError := func(name, want, have string) {
-		t.Errorf("%s: output mismatch\n\twant:'%s'\n\thave:'%s'",
-			name, want, have)
-	}
-
-	// Perform check.
-	haveLines := linesToMap(strings.Split(newOptabs.String(), "\n"))
-	wantLines := linesToMap(strings.Split(existingOptabs, "\n"))
-	for name, wantLine := range wantLines {
-		haveLine := haveLines[name]
-
-		haveLine = strings.Trim(haveLine, " \t,")
-		wantLine = strings.Trim(wantLine, " \t,")
-
-		if haveLine == "" {
-			t.Errorf("%s: not found", name)
-			continue
-		}
-
-		if line := expectedChanges[name]; line != "" {
-			if line != haveLine {
-				reportError(name, line, haveLine)
-			}
-			continue
-		}
-
-		if !strings.Contains(haveLine, wantLine) {
-			reportError(name, wantLine, haveLine)
-		}
+	for op := range expectedOpcodes {
+		t.Errorf("missing opcode: %s", op)
 	}
 }
diff --git a/x86/x86avxgen/config.go b/x86/x86avxgen/config.go
deleted file mode 100644
index c4cd0d4..0000000
--- a/x86/x86avxgen/config.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-import "path/filepath"
-
-const (
-	progName = "x86avxgen"
-	specFile = "x86.v0.2.csv" // Default spec filename
-
-	// Paths are relative to GOROOT.
-	pathVexOptabs = "src/cmd/internal/obj/x86/vex_optabs.go"
-	pathAenum     = "src/cmd/internal/obj/x86/aenum.go"
-	pathAnames    = "src/cmd/internal/obj/x86/anames.go"
-	pathTests     = "src/cmd/asm/internal/asm/testdata/amd64enc.s"
-)
-
-var (
-	filenameVexOptabs = filepath.Base(pathVexOptabs)
-	filenameAenum     = filepath.Base(pathAenum)
-	filenameAnames    = filepath.Base(pathAnames)
-	filenameTests     = filepath.Base(pathTests)
-)
diff --git a/x86/x86avxgen/decode.go b/x86/x86avxgen/decode.go
new file mode 100644
index 0000000..667f77c
--- /dev/null
+++ b/x86/x86avxgen/decode.go
@@ -0,0 +1,395 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"log"
+	"regexp"
+	"strings"
+
+	"golang.org/x/arch/x86/xeddata"
+)
+
+// encoding is decoded XED instruction pattern.
+type encoding struct {
+	// opbyte is opcode byte (one that follows [E]VEX prefix).
+	// It's called "opcode" in Intel manual, but we use that for
+	// instruction name (iclass in XED terms).
+	opbyte string
+
+	// opdigit is ModRM.Reg field used to encode opcode extension.
+	// In Intel manual, "/digit" notation is used.
+	opdigit string
+
+	// vex represents [E]VEX fields that are used in a first [E]VEX
+	// opBytes element (see prefixExpr function).
+	vex struct {
+		P string // 66/F2/F3
+		L string // 128/256/512
+		M string // 0F/0F38/0F3A
+		W string // W0/W1
+	}
+
+	// evexScale is a scaling factor used to calculate compact disp-8.
+	evexScale string
+
+	// evexBcstScale is like evexScale, but used during broadcasting.
+	// Empty for optab entries that do not have broadcasting support.
+	evexBcstScale string
+
+	// evex describes which features of EVEX can be used by optab entry.
+	// All flags are "false" for VEX-encoded insts.
+	evex struct {
+		// There is no "broadcast" flag because it's inferred
+		// from non-empty evexBcstScale.
+
+		SAE      bool // EVEX.b controls SAE for reg-reg insts
+		Rounding bool // EVEX.b + EVEX.RC (VL) control rounding for FP insts
+		Zeroing  bool // Instruction can use zeroing.
+	}
+}
+
+type decoder struct {
+	ctx   *context
+	insts []*instruction
+}
+
+// decodeGroups fills ctx.groups with decoded instruction groups.
+//
+// Reads XED objects from ctx.xedPath.
+func decodeGroups(ctx *context) {
+	d := decoder{ctx: ctx}
+	groups := make(map[string][]*instruction)
+	for _, inst := range d.DecodeAll() {
+		groups[inst.opcode] = append(groups[inst.opcode], inst)
+	}
+	for op, insts := range groups {
+		ctx.groups = append(ctx.groups, &instGroup{
+			opcode: op,
+			list:   insts,
+		})
+	}
+}
+
+// DecodeAll decodes every XED instruction.
+func (d *decoder) DecodeAll() []*instruction {
+	err := xeddata.WalkInsts(d.ctx.xedPath, func(inst *xeddata.Inst) {
+		inst.Pattern = xeddata.ExpandStates(d.ctx.db, inst.Pattern)
+		pset := xeddata.NewPatternSet(inst.Pattern)
+
+		opcode := inst.Iclass
+
+		switch {
+		case inst.HasAttribute("AMDONLY") || inst.Extension == "XOP":
+			return // Only VEX and EVEX are supported
+		case !pset.Is("VEX") && !pset.Is("EVEX"):
+			return // Skip non-AVX instructions
+		case inst.RealOpcode == "N":
+			return // Skip unstable instructions
+		}
+
+		// Expand some patterns to simplify decodePattern.
+		pset.Replace("FIX_ROUND_LEN128()", "VL=0")
+		pset.Replace("FIX_ROUND_LEN512()", "VL=2")
+
+		mask, args := d.decodeArgs(pset, inst)
+		d.insts = append(d.insts, &instruction{
+			pset:   pset,
+			opcode: opcode,
+			mask:   mask,
+			args:   args,
+			enc:    d.decodePattern(pset, inst),
+		})
+	})
+	if err != nil {
+		log.Fatalf("walk insts: %v", err)
+	}
+	return d.insts
+}
+
+// registerArgs maps XED argument name RHS to its decoded version.
+var registerArgs = map[string]argument{
+	"GPR32_R()":  {"Yrl", "reg"},
+	"GPR64_R()":  {"Yrl", "reg"},
+	"VGPR32_R()": {"Yrl", "reg"},
+	"VGPR64_R()": {"Yrl", "reg"},
+	"VGPR32_N()": {"Yrl", "regV"},
+	"VGPR64_N()": {"Yrl", "regV"},
+	"GPR32_B()":  {"Yrl", "reg/mem"},
+	"GPR64_B()":  {"Yrl", "reg/mem"},
+	"VGPR32_B()": {"Yrl", "reg/mem"},
+	"VGPR64_B()": {"Yrl", "reg/mem"},
+
+	"XMM_R()":  {"Yxr", "reg"},
+	"XMM_R3()": {"YxrEvex", "reg"},
+	"XMM_N()":  {"Yxr", "regV"},
+	"XMM_N3()": {"YxrEvex", "regV"},
+	"XMM_B()":  {"Yxr", "reg/mem"},
+	"XMM_B3()": {"YxrEvex", "reg/mem"},
+	"XMM_SE()": {"Yxr", "regIH"},
+
+	"YMM_R()":  {"Yyr", "reg"},
+	"YMM_R3()": {"YyrEvex", "reg"},
+	"YMM_N()":  {"Yyr", "regV"},
+	"YMM_N3()": {"YyrEvex", "regV"},
+	"YMM_B()":  {"Yyr", "reg/mem"},
+	"YMM_B3()": {"YyrEvex", "reg/mem"},
+	"YMM_SE()": {"Yyr", "regIH"},
+
+	"ZMM_R3()": {"Yzr", "reg"},
+	"ZMM_N3()": {"Yzr", "regV"},
+	"ZMM_B3()": {"Yzr", "reg/mem"},
+
+	"MASK_R()": {"Yk", "reg"},
+	"MASK_N()": {"Yk", "regV"},
+	"MASK_B()": {"Yk", "reg/mem"},
+
+	"MASKNOT0()": {"Yknot0", "kmask"},
+
+	// Handled specifically in "generate".
+	"MASK1()": {"MASK1()", "MASK1()"},
+}
+
+func (d *decoder) decodeArgs(pset xeddata.PatternSet, inst *xeddata.Inst) (mask *argument, args []*argument) {
+	for i, f := range strings.Fields(inst.Operands) {
+		xarg, err := xeddata.NewOperand(d.ctx.db, f)
+		if err != nil {
+			log.Fatalf("%s: args[%d]: %v", inst, i, err)
+		}
+
+		switch {
+		case xarg.Action == "":
+			continue // Skip meta args like EMX_BROADCAST_1TO32_8
+		case !xarg.IsVisible():
+			continue
+		}
+
+		arg := &argument{}
+		args = append(args, arg)
+
+		switch xarg.NameLHS() {
+		case "IMM0":
+			if xarg.Width != "b" {
+				log.Fatalf("%s: args[%d]: expected width=b, found %s", inst, i, xarg.Width)
+			}
+			if pset["IMM0SIGNED=1"] {
+				arg.ytype = "Yi8"
+			} else {
+				arg.ytype = "Yu8"
+			}
+			arg.zkind = "imm8"
+
+		case "REG0", "REG1", "REG2", "REG3":
+			rhs := xarg.NameRHS()
+			if rhs == "MASK1()" {
+				mask = arg
+			}
+			*arg = registerArgs[rhs]
+			if arg.ytype == "" {
+				log.Fatalf("%s: args[%d]: unexpected %s reg", inst, i, rhs)
+			}
+			if xarg.Attributes["MULTISOURCE4"] {
+				arg.ytype += "Multi4"
+			}
+
+		case "MEM0":
+			arg.ytype = pset.MatchOrDefault("Ym",
+				"VMODRM_XMM()", "Yxvm",
+				"VMODRM_YMM()", "Yyvm",
+				"UISA_VMODRM_XMM()", "YxvmEvex",
+				"UISA_VMODRM_YMM()", "YyvmEvex",
+				"UISA_VMODRM_ZMM()", "Yzvm",
+			)
+			arg.zkind = "reg/mem"
+
+		default:
+			log.Fatalf("%s: args[%d]: unexpected %s", inst, i, xarg.NameRHS())
+		}
+	}
+
+	// Reverse args.
+	for i := len(args)/2 - 1; i >= 0; i-- {
+		j := len(args) - 1 - i
+		args[i], args[j] = args[j], args[i]
+	}
+
+	return mask, args
+}
+
+func (d *decoder) decodePattern(pset xeddata.PatternSet, inst *xeddata.Inst) *encoding {
+	var enc encoding
+
+	enc.opdigit = d.findOpdigit(pset)
+	enc.opbyte = d.findOpbyte(pset, inst)
+
+	if strings.Contains(inst.Attributes, "DISP8_") {
+		enc.evexScale = d.findEVEXScale(pset)
+		enc.evexBcstScale = d.findEVEXBcstScale(pset, inst)
+	}
+
+	enc.vex.P = pset.Match(
+		"VEX_PREFIX=1", "66",
+		"VEX_PREFIX=2", "F2",
+		"VEX_PREFIX=3", "F3")
+	enc.vex.M = pset.Match(
+		"MAP=1", "0F",
+		"MAP=2", "0F38",
+		"MAP=3", "0F3A")
+	enc.vex.L = pset.MatchOrDefault("128",
+		"VL=0", "128",
+		"VL=1", "256",
+		"VL=2", "512")
+	enc.vex.W = pset.MatchOrDefault("W0",
+		"REXW=0", "W0",
+		"REXW=1", "W1")
+
+	if pset.Is("EVEX") {
+		enc.evex.SAE = strings.Contains(inst.Operands, "TXT=SAESTR")
+		enc.evex.Rounding = strings.Contains(inst.Operands, "TXT=ROUNDC")
+		enc.evex.Zeroing = strings.Contains(inst.Operands, "TXT=ZEROSTR")
+	}
+
+	// Prefix each non-empty part with vex or evex.
+	parts := [...]*string{
+		&enc.evexScale, &enc.evexBcstScale,
+		&enc.vex.P, &enc.vex.M, &enc.vex.L, &enc.vex.W,
+	}
+	for _, p := range parts {
+		if *p == "" {
+			continue
+		}
+		if pset.Is("EVEX") {
+			*p = "evex" + *p
+		} else {
+			*p = "vex" + *p
+		}
+	}
+
+	return &enc
+}
+
+func (d *decoder) findOpdigit(pset xeddata.PatternSet) string {
+	reg := pset.Index(
+		"REG[0b000]",
+		"REG[0b001]",
+		"REG[0b010]",
+		"REG[0b011]",
+		"REG[0b100]",
+		"REG[0b101]",
+		"REG[0b110]",
+		"REG[0b111]",
+	)
+	// Fixed ModRM.Reg field means that it is used for opcode extension.
+	if reg != -1 {
+		return fmt.Sprintf("0%d", reg)
+	}
+	return ""
+}
+
+// opbyteRE matches uint8 hex literal.
+var opbyteRE = regexp.MustCompile(`0x[0-9A-F]{2}`)
+
+func (d *decoder) findOpbyte(pset xeddata.PatternSet, inst *xeddata.Inst) string {
+	opbyte := ""
+	for k := range pset {
+		if opbyteRE.MatchString(k) {
+			if opbyte == "" {
+				opbyte = k
+			} else {
+				log.Fatalf("%s: multiple opbytes", inst)
+			}
+		}
+	}
+	return opbyte
+}
+
+func (d *decoder) findEVEXScale(pset xeddata.PatternSet) string {
+	switch {
+	case pset["NELEM_FULL()"], pset["NELEM_FULLMEM()"]:
+		return pset.Match(
+			"VL=0", "N16",
+			"VL=1", "N32",
+			"VL=2", "N64")
+	case pset["NELEM_MOVDDUP()"]:
+		return pset.Match(
+			"VL=0", "N8",
+			"VL=1", "N32",
+			"VL=2", "N64")
+	case pset["NELEM_HALF()"], pset["NELEM_HALFMEM()"]:
+		return pset.Match(
+			"VL=0", "N8",
+			"VL=1", "N16",
+			"VL=2", "N32")
+	case pset["NELEM_QUARTERMEM()"]:
+		return pset.Match(
+			"VL=0", "N4",
+			"VL=1", "N8",
+			"VL=2", "N16")
+	case pset["NELEM_EIGHTHMEM()"]:
+		return pset.Match(
+			"VL=0", "N2",
+			"VL=1", "N4",
+			"VL=2", "N8")
+	case pset["NELEM_TUPLE2()"]:
+		return pset.Match(
+			"ESIZE_32_BITS()", "N8",
+			"ESIZE_64_BITS()", "N16")
+	case pset["NELEM_TUPLE4()"]:
+		return pset.Match(
+			"ESIZE_32_BITS()", "N16",
+			"ESIZE_64_BITS()", "N32")
+	case pset["NELEM_TUPLE8()"]:
+		return "N32"
+	case pset["NELEM_MEM128()"], pset["NELEM_TUPLE1_4X()"]:
+		return "N16"
+	}
+
+	// Explicit list is required to make it possible to
+	// detect unhandled nonterminals for the caller.
+	scalars := [...]string{
+		"NELEM_SCALAR()",
+		"NELEM_GSCAT()",
+		"NELEM_GPR_READER()",
+		"NELEM_GPR_READER_BYTE()",
+		"NELEM_GPR_READER_WORD()",
+		"NELEM_GPR_WRITER_STORE()",
+		"NELEM_GPR_WRITER_STORE_BYTE()",
+		"NELEM_GPR_WRITER_STORE_WORD()",
+		"NELEM_GPR_WRITER_LDOP_D()",
+		"NELEM_GPR_WRITER_LDOP_Q()",
+		"NELEM_TUPLE1()",
+		"NELEM_TUPLE1_BYTE()",
+		"NELEM_TUPLE1_WORD()",
+	}
+	for _, scalar := range scalars {
+		if pset[scalar] {
+			return pset.Match(
+				"ESIZE_8_BITS()", "N1",
+				"ESIZE_16_BITS()", "N2",
+				"ESIZE_32_BITS()", "N4",
+				"ESIZE_64_BITS()", "N8")
+		}
+	}
+
+	return ""
+}
+
+func (d *decoder) findEVEXBcstScale(pset xeddata.PatternSet, inst *xeddata.Inst) string {
+	// Only FULL and HALF tuples are affected by the broadcasting.
+	switch {
+	case pset["NELEM_FULL()"]:
+		return pset.Match(
+			"ESIZE_32_BITS()", "BcstN4",
+			"ESIZE_64_BITS()", "BcstN8")
+	case pset["NELEM_HALF()"]:
+		return "BcstN4"
+	default:
+		if inst.HasAttribute("BROADCAST_ENABLED") {
+			log.Fatalf("%s: unexpected tuple for bcst", inst)
+		}
+		return ""
+	}
+}
diff --git a/x86/x86avxgen/diag.go b/x86/x86avxgen/diag.go
deleted file mode 100644
index 18e7c11..0000000
--- a/x86/x86avxgen/diag.go
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-// These diagnostics were extensively used during development phase.
-// Now they serve as additional level of tests.
-// If coverage is not 100% for any reason, troubleshooting is required.
-
-import (
-	"fmt"
-	"sort"
-)
-
-// diagnostics is used to collect and display execution info.
-type diagnostics struct {
-	// Count misses for undefined ytab key.
-	ytabMisses      map[string]int
-	optabsGenerated int
-	optabsTotal     int
-}
-
-func (d *diagnostics) Print() {
-	fmt.Println("  -- diag info --")
-	d.printOptabsInfo()
-	fmt.Println()
-	d.printYtabMisses()
-}
-
-func (d *diagnostics) printOptabsInfo() {
-	skipped := d.optabsTotal - d.optabsGenerated
-	cover := float64(d.optabsGenerated*100) / float64(d.optabsTotal)
-	fmt.Println("Optabs info:")
-	fmt.Printf("  processed: %d\n", d.optabsTotal)
-	fmt.Printf("  generated: %d\n", d.optabsGenerated)
-	fmt.Printf("    skipped: %d\n", skipped)
-	fmt.Printf("      cover: %.1f%%\n", cover)
-}
-
-func (d *diagnostics) printYtabMisses() {
-	if len(d.ytabMisses) == 0 {
-		fmt.Println("No ytab key misses recorded")
-		return
-	}
-
-	// Sort by miss count.
-	type ytabMiss struct {
-		key   string
-		count int
-	}
-	misses := make([]ytabMiss, 0, len(d.ytabMisses))
-	for key, count := range d.ytabMisses {
-		misses = append(misses, ytabMiss{
-			key:   key,
-			count: count,
-		})
-	}
-	sort.Slice(misses, func(i, j int) bool {
-		return misses[i].count > misses[j].count
-	})
-
-	fmt.Println("Missed ytab keys:")
-	for _, m := range misses {
-		fmt.Printf("  %+40s = %d\n", m.key, m.count)
-	}
-}
-
-var diag = diagnostics{
-	ytabMisses: make(map[string]int),
-}
diff --git a/x86/x86avxgen/generate.go b/x86/x86avxgen/generate.go
new file mode 100644
index 0000000..14985cb
--- /dev/null
+++ b/x86/x86avxgen/generate.go
@@ -0,0 +1,255 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"bytes"
+	"log"
+	"strings"
+)
+
+// ytab is ytabList element.
+type ytab struct {
+	Zcase   string
+	Zoffset int
+	ArgList string // Ytypes that are matched by this ytab.
+}
+
+// ytabList is a named set of ytab objects.
+// In asm6.go represented as []ytab.
+type ytabList struct {
+	Name  string
+	Ytabs []ytab
+}
+
+// optab describes instruction encodings for specific opcode.
+type optab struct {
+	Opcode   string
+	YtabList *ytabList
+	OpLines  []string
+}
+
+type generator struct {
+	ctx       *context
+	ytabLists map[string]*ytabList
+}
+
+// generateOptabs fills ctx.optabs and ctx.ytabLists with objects created
+// from decoded instructions.
+func generateOptabs(ctx *context) {
+	gen := generator{ctx: ctx, ytabLists: make(map[string]*ytabList)}
+	optabs := make(map[string]*optab)
+	for _, g := range ctx.groups {
+		optabs[g.opcode] = gen.GenerateGroup(g)
+	}
+	ctx.optabs = optabs
+	ctx.ytabLists = gen.ytabLists
+}
+
+// GenerateGroup converts g into optab.
+// Populates internal ytab list map.
+func (gen *generator) GenerateGroup(g *instGroup) *optab {
+	var opLines []string
+	for _, inst := range g.list {
+		opLines = append(opLines, gen.generateOpLine(inst))
+	}
+	return &optab{
+		Opcode:   "A" + g.opcode,
+		OpLines:  opLines,
+		YtabList: gen.internYtabList(g),
+	}
+}
+
+// generateOpLine returns string that describes opBytes for single instruction form.
+func (gen *generator) generateOpLine(inst *instruction) string {
+	parts := []string{gen.prefixExpr(inst)}
+	if inst.pset.Is("EVEX") {
+		parts = append(parts, gen.evexPrefixExpr(inst))
+	}
+	parts = append(parts, inst.enc.opbyte)
+	if inst.enc.opdigit != "" {
+		parts = append(parts, inst.enc.opdigit)
+	}
+	return strings.Join(parts, ", ")
+}
+
+func (gen *generator) prefixExpr(inst *instruction) string {
+	enc := inst.enc
+	return gen.joinPrefixParts([]string{
+		// Special constant that makes AVX byte different from 0x0F,
+		// making it unnecessary to check for both VEX+EVEX when
+		// assigning dealing with legacy instructions that skip it
+		// without advancing "z" counter.
+		"avxEscape",
+		enc.vex.L,
+		enc.vex.P,
+		enc.vex.M,
+		enc.vex.W,
+	})
+}
+
+func (gen *generator) evexPrefixExpr(inst *instruction) string {
+	enc := inst.enc
+	parts := []string{
+		enc.evexScale,
+		enc.evexBcstScale,
+	}
+	if enc.evex.SAE {
+		parts = append(parts, "evexSaeEnabled")
+	}
+	if enc.evex.Rounding {
+		parts = append(parts, "evexRoundingEnabled")
+	}
+	if enc.evex.Zeroing {
+		parts = append(parts, "evexZeroingEnabled")
+	}
+	return gen.joinPrefixParts(parts)
+}
+
+// joinPrefixParts returns the Go OR-expression for every non-empty name.
+// If every name is empty, returns "0".
+func (gen *generator) joinPrefixParts(names []string) string {
+	filterEmptyStrings := func(xs []string) []string {
+		ys := xs[:0]
+		for _, x := range xs {
+			if x != "" {
+				ys = append(ys, x)
+			}
+		}
+		return ys
+	}
+
+	names = filterEmptyStrings(names)
+	if len(names) == 0 {
+		return "0"
+	}
+	return strings.Join(names, "|")
+}
+
+// internYtabList returns ytabList for given group.
+//
+// Returned ytab lists are interned.
+// Same ytab list can be returned for different groups.
+func (gen *generator) internYtabList(g *instGroup) *ytabList {
+	var key string
+	{
+		var buf bytes.Buffer
+		for _, inst := range g.list {
+			buf.WriteString(inst.zform)
+			buf.WriteByte('=')
+			buf.WriteString(inst.YtypeListString())
+			buf.WriteByte(';')
+		}
+		key = buf.String()
+	}
+	if ylist := gen.ytabLists[key]; ylist != nil {
+		return ylist
+	}
+
+	var ytabs []ytab
+	for _, inst := range g.list {
+		zoffset := 2
+		if inst.pset.Is("EVEX") {
+			zoffset++ // Always at least 3 bytes
+		}
+		if inst.enc.opdigit != "" {
+			zoffset++
+		}
+
+		if inst.mask != nil {
+			ytabs = append(ytabs, gen.makeMaskYtabs(zoffset, inst)...)
+		} else {
+			ytabs = append(ytabs, gen.makeYtab(zoffset, inst.zform, inst.args))
+		}
+	}
+	ylist := &ytabList{
+		Name:  "_y" + strings.ToLower(g.opcode),
+		Ytabs: ytabs,
+	}
+	gen.ytabLists[key] = ylist
+	return ylist
+}
+
+var zcaseByZform = map[string]string{
+	"evex imm8 reg kmask reg/mem":          "Zevex_i_r_k_rm",
+	"evex imm8 reg reg/mem":                "Zevex_i_r_rm",
+	"evex imm8 reg/mem kmask reg":          "Zevex_i_rm_k_r",
+	"evex imm8 reg/mem kmask regV opdigit": "Zevex_i_rm_k_vo",
+	"evex imm8 reg/mem reg":                "Zevex_i_rm_r",
+	"evex imm8 reg/mem regV opdigit":       "Zevex_i_rm_vo",
+	"evex imm8 reg/mem regV kmask reg":     "Zevex_i_rm_v_k_r",
+	"evex imm8 reg/mem regV reg":           "Zevex_i_rm_v_r",
+	"evex kmask reg/mem opdigit":           "Zevex_k_rmo",
+	"evex reg kmask reg/mem":               "Zevex_r_k_rm",
+	"evex reg reg/mem":                     "Zevex_r_v_rm",
+	"evex reg regV kmask reg/mem":          "Zevex_r_v_k_rm",
+	"evex reg regV reg/mem":                "Zevex_r_v_rm",
+	"evex reg/mem kmask reg":               "Zevex_rm_k_r",
+	"evex reg/mem reg":                     "Zevex_rm_v_r",
+	"evex reg/mem regV kmask reg":          "Zevex_rm_v_k_r",
+	"evex reg/mem regV reg":                "Zevex_rm_v_r",
+
+	"":                          "Zvex",
+	"imm8 reg reg/mem":          "Zvex_i_r_rm",
+	"imm8 reg/mem reg":          "Zvex_i_rm_r",
+	"imm8 reg/mem regV opdigit": "Zvex_i_rm_vo",
+	"imm8 reg/mem regV reg":     "Zvex_i_rm_v_r",
+	"reg reg/mem":               "Zvex_r_v_rm",
+	"reg regV reg/mem":          "Zvex_r_v_rm",
+	"reg/mem opdigit":           "Zvex_rm_v_ro",
+	"reg/mem reg":               "Zvex_rm_v_r",
+	"reg/mem regV opdigit":      "Zvex_rm_r_vo",
+	"reg/mem regV reg":          "Zvex_rm_v_r",
+	"reg/mem":                   "Zvex_rm_v_r",
+	"regIH reg/mem regV reg":    "Zvex_hr_rm_v_r",
+	"regV reg/mem reg":          "Zvex_v_rm_r",
+}
+
+func (gen *generator) makeYtab(zoffset int, zform string, args []*argument) ytab {
+	var ytypes []string
+	for _, arg := range args {
+		if arg.ytype != "Ynone" {
+			ytypes = append(ytypes, arg.ytype)
+		}
+	}
+	argList := strings.Join(ytypes, ", ")
+	zcase := zcaseByZform[zform]
+	if zcase == "" {
+		log.Fatalf("no zcase for %q", zform)
+	}
+	return ytab{
+		Zcase:   zcase,
+		Zoffset: zoffset,
+		ArgList: argList,
+	}
+}
+
+// makeMaskYtabs returns 2 ytabs created from instruction with MASK1() argument.
+//
+// This is required due to how masking is implemented in asm6.
+// Single MASK1() instruction produces 2 ytabs, for example:
+//	1. OP xmm, mem     | Yxr, Yxm         | Does not permit K arguments (K0 implied)
+//	2. OP xmm, K2, mem | Yxr, Yknot0, Yxm | Does not permit K0 argument
+//
+// This function also exploits that both ytab entries have same opbytes,
+// hence it is efficient to emit only one opbytes line and 0 Z-offset
+// for first ytab object.
+func (gen *generator) makeMaskYtabs(zoffset int, inst *instruction) []ytab {
+	var k0 ytab
+	{
+		zform := strings.Replace(inst.zform, "MASK1() ", "", 1)
+		inst.mask.ytype = "Ynone"
+		k0 = gen.makeYtab(0, zform, inst.args)
+	}
+	var knot0 ytab
+	{
+		zform := strings.Replace(inst.zform, "MASK1() ", "kmask ", 1)
+		inst.mask.ytype = "Yknot0"
+		knot0 = gen.makeYtab(zoffset, zform, inst.args)
+	}
+
+	inst.mask.ytype = "MASK1()" // Restore Y-type
+	return []ytab{k0, knot0}
+}
diff --git a/x86/x86avxgen/generate_aenum.go b/x86/x86avxgen/generate_aenum.go
deleted file mode 100644
index 9462b55..0000000
--- a/x86/x86avxgen/generate_aenum.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"go/ast"
-	"go/format"
-	"go/parser"
-	"go/token"
-	"io"
-	"io/ioutil"
-	"sort"
-)
-
-// generateAenum generates instruction ID enumeration.
-// Adds elements from newNames if they are not already there.
-// Output enum entries are sorted by their name (except ALAST
-// which is always the last element).
-//
-// Reader - old/current "aenum.go" contents provider.
-// Writer - new "aenum.go" contents consumer.
-//
-// Reads r to examine current A-enum (instruction IDs prefixed with "A")
-// file contents. Updated contents are written to w.
-func generateAenum(r io.Reader, w io.Writer, newNames []string) error {
-	f, fset, err := parseFile(r)
-	if err != nil {
-		return err
-	}
-
-	decl := removeAenumDecl(f)
-	if decl == nil {
-		return errors.New(filenameAenum + " missing AXXX const decl clause")
-	}
-	last := decl.Specs[len(decl.Specs)-1]
-	decl.Specs = decl.Specs[:len(decl.Specs)-1] // Drop "ALAST".
-	for _, name := range newNames {
-		decl.Specs = append(decl.Specs, &ast.ValueSpec{
-			Names: []*ast.Ident{{Name: "A" + name}},
-		})
-	}
-	sort.Slice(decl.Specs, func(i, j int) bool {
-		x, y := decl.Specs[i].(*ast.ValueSpec), decl.Specs[j].(*ast.ValueSpec)
-		return x.Names[0].Name < y.Names[0].Name
-	})
-	decl.Specs = append(decl.Specs, last)
-
-	// Reset nodes positions.
-	for _, spec := range decl.Specs {
-		spec := spec.(*ast.ValueSpec)
-		resetPos(spec)
-		if spec.Doc != nil {
-			return fmt.Errorf("%s: doc comments are not supported", spec.Names[0].Name)
-		}
-		if spec.Comment != nil {
-			resetPos(spec.Comment)
-		}
-	}
-
-	var buf bytes.Buffer
-	format.Node(&buf, fset, f)
-	buf.WriteByte('\n')
-	format.Node(&buf, fset, decl)
-
-	// Additional formatting call is needed to make
-	// whitespace gofmt-compliant.
-	prettyCode, err := format.Source(buf.Bytes())
-	if err != nil {
-		return err
-	}
-	w.Write(prettyCode)
-
-	return nil
-}
-
-// removeAenumDecl searches AXXX constand decl and removes it from f.
-// Associated comments are also removed.
-// Returns AXXX declaration or nil, if it was not found.
-func removeAenumDecl(f *ast.File) *ast.GenDecl {
-	for i, decl := range f.Decls {
-		decl, ok := decl.(*ast.GenDecl)
-		if !ok {
-			continue
-		}
-		if decl.Tok != token.CONST {
-			continue
-		}
-		// AXXX enum is distinguished by trailing ALAST.
-		last := decl.Specs[len(decl.Specs)-1].(*ast.ValueSpec)
-		if len(last.Names) == 1 && last.Names[0].Name == "ALAST" {
-			// Remove comments.
-			blacklist := make(map[*ast.CommentGroup]bool)
-			if decl.Doc != nil {
-				blacklist[decl.Doc] = true
-			}
-			for _, spec := range decl.Specs {
-				spec := spec.(*ast.ValueSpec)
-				if spec.Doc != nil {
-					blacklist[spec.Doc] = true
-				}
-				if spec.Comment != nil {
-					blacklist[spec.Comment] = true
-				}
-			}
-			comments := f.Comments[:0]
-			for _, c := range f.Comments {
-				if !blacklist[c] {
-					comments = append(comments, c)
-				}
-			}
-			f.Comments = comments
-			// Remove decl itself.
-			f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
-
-			return decl
-		}
-	}
-
-	return nil
-}
-
-// reset node position info.
-func resetPos(node ast.Node) {
-	switch node := node.(type) {
-	case *ast.CommentGroup:
-		node.List[0].Slash = 0
-	case *ast.ValueSpec:
-		node.Names[0].NamePos = 0
-	default:
-		panic(fmt.Sprintf("can't reset pos for %T", node))
-	}
-}
-
-// parseFile parses file that is identified by specified path.
-func parseFile(r io.Reader) (*ast.File, *token.FileSet, error) {
-	src, err := ioutil.ReadAll(r)
-	if err != nil {
-		return nil, nil, err
-	}
-	fset := token.NewFileSet()
-	mode := parser.ParseComments
-	f, err := parser.ParseFile(fset, filenameAenum, src, mode)
-	if err != nil {
-		return nil, nil, err
-	}
-	return f, fset, nil
-}
diff --git a/x86/x86avxgen/instruction.go b/x86/x86avxgen/instruction.go
new file mode 100644
index 0000000..a20d141
--- /dev/null
+++ b/x86/x86avxgen/instruction.go
@@ -0,0 +1,64 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"strings"
+
+	"golang.org/x/arch/x86/xeddata"
+)
+
+// argument is a describes single instruction operand properties.
+type argument struct {
+	// ytype is argument class as returned by asm6 "oclass" function.
+	ytype string
+
+	// zkind is a partial Z-case matcher.
+	// Determines which Z-case handles the encoding of instruction.
+	zkind string
+}
+
+// instruction is decoded XED instruction.
+// Used to produce ytabs and optabs in later phases.
+type instruction struct {
+	// opcode is instruction symbolic name.
+	opcode string
+
+	pset xeddata.PatternSet
+	enc  *encoding
+
+	// mask is EVEX K-register argument; points to args element.
+	// Used to emit Yk0+Yknot0 table entries.
+	// Nil for VEX-encoded insts.
+	mask *argument
+	args []*argument
+
+	// zform is a pattern that determines which encoder Z-case is used.
+	// We store zform instead of zcase directly because it's further
+	// expanded during optabs generation.
+	zform string
+}
+
+// String returns short inst printed representation.
+func (inst *instruction) String() string { return inst.opcode }
+
+// YtypeListString joins each argument Y-type and returns the result.
+func (inst *instruction) YtypeListString() string {
+	var parts []string
+	for _, arg := range inst.args {
+		parts = append(parts, arg.ytype)
+	}
+	return strings.Join(parts, " ")
+}
+
+// ArgIndexByZkind returns first argument matching given zkind or -1.
+func (inst *instruction) ArgIndexByZkind(zkind string) int {
+	for i, arg := range inst.args {
+		if arg.zkind == zkind {
+			return i
+		}
+	}
+	return -1
+}
diff --git a/x86/x86avxgen/main.go b/x86/x86avxgen/main.go
index be57570..9fdf262 100644
--- a/x86/x86avxgen/main.go
+++ b/x86/x86avxgen/main.go
@@ -1,318 +1,361 @@
-// Copyright 2017 The Go Authors. All rights reserved.
+// Copyright 2018 The Go Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 
-// X86avxgen generates Go code for obj/x86 that adds AVX instructions support.
-//
-// Currently supports only AVX1 and AVX2 instructions.
-// When x86.csv will contain AVX512 instructions and
-// asm6.go is patched to support them,
-// this program can be extended to generate the remainder.
-//
-// The output consists of multiple files:
-// - cmd/internal/obj/x86/aenum.go
-//	Add enum entries for new instructions.
-// - cmd/internal/obj/x86/anames.go
-//	Add new instruction names.
-// - cmd/internal/obj/x86/vex_optabs.go
-//	Add new instruction optabs.
-// - cmd/asm/internal/asm/testdata/amd64enc.s
-//	Uncomment tests for added instructions.
-//
-// Usage:
-//	x86avxgen -goroot=$DEV_GOROOT [-csv=x86.csv] [-output=x86avxgen-output]
-// $DEV_GOROOT is a path to Go repository working tree root.
-//
-// To get precise usage information, call this program without arguments.
 package main
 
 import (
-	"bufio"
-	"bytes"
-	"errors"
 	"flag"
 	"fmt"
-	"go/format"
-	"io"
-	"io/ioutil"
 	"log"
 	"os"
-	"os/exec"
-	"regexp"
+	"sort"
 	"strings"
 
-	"golang.org/x/arch/x86/x86csv"
+	"golang.org/x/arch/x86/xeddata"
 )
 
+// instGroup holds a list of instructions with the same opcode.
+type instGroup struct {
+	opcode string
+	list   []*instruction
+}
+
+// context is x86avxgen program execution state.
+type context struct {
+	db *xeddata.Database
+
+	groups []*instGroup
+
+	optabs    map[string]*optab
+	ytabLists map[string]*ytabList
+
+	// Command line arguments:
+
+	xedPath string
+}
+
 func main() {
-	goroot := flag.String(
-		"goroot", "",
-		"Go sources root path")
-	csv := flag.String(
-		"csv", specFile,
-		"Absolute path to x86spec CSV file")
-	output := flag.String(
-		"output", "x86avxgen-output",
-		"Where to put output files")
-	autopatchEnabled := flag.Bool(
-		"autopatch", false,
-		"Try automatic patching (writes to goroot, unsafe if it is not under VCS)")
-	diagEnabled := flag.Bool(
-		"diag", false,
-		"Print debug information")
+	log.SetPrefix("x86avxgen: ")
+	log.SetFlags(log.Lshortfile)
+
+	var ctx context
+
+	runSteps(&ctx,
+		parseFlags,
+		openDatabase,
+		buildTables,
+		printTables)
+}
+
+func buildTables(ctx *context) {
+	// Order of steps is significant.
+	runSteps(ctx,
+		decodeGroups,
+		mergeRegMem,
+		addGoSuffixes,
+		mergeWIG,
+		assignZforms,
+		sortGroups,
+		generateOptabs)
+}
+
+func runSteps(ctx *context, steps ...func(*context)) {
+	for _, f := range steps {
+		f(ctx)
+	}
+}
+
+func parseFlags(ctx *context) {
+	flag.StringVar(&ctx.xedPath, "xedPath", "./xedpath",
+		"XED datafiles location")
+
 	flag.Parse()
-	if len(os.Args) == 1 {
-		fmt.Printf("%s: x86 AVX ytab generator", progName)
-		flag.Usage()
-		os.Exit(1)
-	}
-	if *goroot == "" {
-		log.Fatal("goroot arg is mandatory")
-	}
-	if _, err := os.Stat(*csv); os.IsNotExist(err) {
-		log.Fatalf("spec file %s not found", *csv)
-	}
-
-	r, err := specRowReader(*csv)
-	if err != nil {
-		log.Fatal(err)
-	}
-	if err := os.MkdirAll(*output, 0755); err != nil {
-		log.Fatal(err)
-	}
-
-	opcodes, err := doGenerateVexOptabs(r, mustOpenFile(*output+"/"+filenameVexOptabs))
-	if err != nil {
-		log.Fatal(err)
-	}
-	if err := doGenerateAenum(*goroot, *output, opcodes); err != nil {
-		log.Fatal(err)
-	}
-	if err := doGenerateAnames(*output); err != nil {
-		log.Fatal(err)
-	}
-	if err := doGenerateTests(*goroot, *output, opcodes); err != nil {
-		log.Fatal(err)
-	}
-
-	if *autopatchEnabled {
-		if err := doAutopatch(*goroot, *output); err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	if *diagEnabled {
-		diag.Print()
-	}
 }
 
-func mustOpenFile(path string) *os.File {
-	f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
+func openDatabase(ctx *context) {
+	db, err := xeddata.NewDatabase(ctx.xedPath)
 	if err != nil {
-		log.Fatal(err)
+		log.Fatalf("open database: %v", err)
 	}
-	return f
+	ctx.db = db
 }
 
-// filterVEX removes all non-VEX instructions from insts.
-// Returns updates slice.
-func filterVEX(insts []*x86csv.Inst) []*x86csv.Inst {
-	vexInsts := insts[:0]
-	for _, inst := range insts {
-		// Checking CPUID for AVX is not good enough
-		// in this case, because some instructions
-		// have VEX prefix, but no AVX CPUID flag.
-		if strings.HasPrefix(inst.Encoding, "VEX.") {
-			vexInsts = append(vexInsts, inst)
-		}
-	}
-	return vexInsts
-}
-
-func doGenerateVexOptabs(r *x86csv.Reader, w io.Writer) (opcodes []string, err error) {
-	insts, err := r.ReadAll()
-	if err != nil {
-		return nil, err
-	}
-	insts = filterVEX(insts)
-
-	var buf bytes.Buffer
-
-	visitOptab := func(o optab) {
-		diag.optabsGenerated++
-
-		opcodes = append(opcodes, o.as)
-
-		tmpl := "\t{A%s, %s, Pvex, [23]uint8{%s}},\n"
-		fmt.Fprintf(&buf, tmpl, o.as, o.ytabID, strings.Join(o.op, ","))
+// mergeRegMem merges reg-only with mem-only instructions.
+// For example: {MOVQ reg, mem} + {MOVQ reg, reg} = {MOVQ reg, reg/mem}.
+func mergeRegMem(ctx *context) {
+	mergeKey := func(inst *instruction) string {
+		return strings.Join([]string{
+			fmt.Sprint(len(inst.args)),
+			inst.enc.opbyte,
+			inst.enc.opdigit,
+			inst.enc.vex.P,
+			inst.enc.vex.L,
+			inst.enc.vex.M,
+			inst.enc.vex.W,
+		}, " ")
 	}
 
-	doGroups(insts, func(op string, insts []*x86csv.Inst) {
-		diag.optabsTotal++
-
-		if ot, ok := precomputedOptabs[op]; ok {
-			log.Printf("notice: using precomputed %s optab", op)
-			visitOptab(ot)
-			return
-		}
-
-		key := ytabKey(op, insts)
-		ytabID := ytabMap[key]
-		if ytabID == "" {
-			diag.ytabMisses[key]++
-			log.Printf("warning: skip %s: no ytabID for '%s' key", op, key)
-			return
-		}
-		var encParts []string
-		for _, inst := range insts {
-			enc := parseEncoding(inst.Encoding)
-
-			encParts = append(encParts, vexExpr(enc.vex))
-			encParts = append(encParts, "0x"+enc.opbyte)
-			if enc.opdigit != "" {
-				encParts = append(encParts, "0"+enc.opdigit)
+	for _, g := range ctx.groups {
+		regOnly := make(map[string]*instruction)
+		memOnly := make(map[string]*instruction)
+		list := g.list[:0]
+		for _, inst := range g.list {
+			switch {
+			case inst.pset.Is("RegOnly"):
+				regOnly[mergeKey(inst)] = inst
+			case inst.pset.Is("MemOnly"):
+				memOnly[mergeKey(inst)] = inst
+			default:
+				if len(inst.args) == 0 {
+					list = append(list, inst)
+					continue
+				}
+				log.Fatalf("%s: unexpected MOD value", inst)
 			}
 		}
-		visitOptab(optab{
-			as:     op,
-			ytabID: ytabID,
-			op:     encParts,
-		})
+
+		for k, m := range memOnly {
+			r := regOnly[k]
+			if r != nil {
+				index := m.ArgIndexByZkind("reg/mem")
+				arg := m.args[index]
+				switch ytype := r.args[index].ytype; ytype {
+				case "Yrl":
+					arg.ytype = "Yml"
+				case "Yxr":
+					arg.ytype = "Yxm"
+				case "YxrEvex":
+					arg.ytype = "YxmEvex"
+				case "Yyr":
+					arg.ytype = "Yym"
+				case "YyrEvex":
+					arg.ytype = "YymEvex"
+				case "Yzr":
+					arg.ytype = "Yzm"
+				case "Yk":
+					arg.ytype = "Ykm"
+				default:
+					log.Fatalf("%s: unexpected register type: %s", r, ytype)
+				}
+				// Merge EVEX flags into m.
+				m.enc.evex.SAE = m.enc.evex.SAE || r.enc.evex.SAE
+				m.enc.evex.Rounding = m.enc.evex.Rounding || r.enc.evex.Rounding
+				m.enc.evex.Zeroing = m.enc.evex.Zeroing || r.enc.evex.Zeroing
+				delete(regOnly, k)
+			}
+			list = append(list, m)
+		}
+		for _, r := range regOnly {
+			list = append(list, r)
+		}
+
+		g.list = list
+	}
+}
+
+// mergeWIG merges [E]VEX.W0 + [E]VEX.W1 into [E]VEX.WIG.
+func mergeWIG(ctx *context) {
+	mergeKey := func(inst *instruction) string {
+		return strings.Join([]string{
+			fmt.Sprint(len(inst.args)),
+			inst.enc.opbyte,
+			inst.enc.opdigit,
+			inst.enc.vex.P,
+			inst.enc.vex.L,
+			inst.enc.vex.M,
+		}, " ")
+	}
+
+	for _, g := range ctx.groups {
+		w0map := make(map[string]*instruction)
+		w1map := make(map[string]*instruction)
+		list := g.list[:0]
+		for _, inst := range g.list {
+			switch w := inst.enc.vex.W; w {
+			case "evexW0", "vexW0":
+				w0map[mergeKey(inst)] = inst
+			case "evexW1", "vexW1":
+				w1map[mergeKey(inst)] = inst
+			default:
+				log.Fatalf("%s: unexpected vex.W: %s", inst, w)
+			}
+		}
+
+		for k, w0 := range w0map {
+			w1 := w1map[k]
+			if w1 != nil {
+				w0.enc.vex.W = strings.Replace(w0.enc.vex.W, "W0", "WIG", 1)
+				delete(w1map, k)
+			}
+			list = append(list, w0)
+		}
+		for _, w1 := range w1map {
+			list = append(list, w1)
+		}
+
+		g.list = list
+	}
+}
+
+// assignZforms initializes zform field of every instruction in ctx.
+func assignZforms(ctx *context) {
+	for _, g := range ctx.groups {
+		for _, inst := range g.list {
+			var parts []string
+			if inst.pset.Is("EVEX") {
+				parts = append(parts, "evex")
+			}
+			for _, arg := range inst.args {
+				parts = append(parts, arg.zkind)
+			}
+			if inst.enc.opdigit != "" {
+				parts = append(parts, "opdigit")
+			}
+			inst.zform = strings.Join(parts, " ")
+		}
+	}
+}
+
+// sortGroups sorts the list of groups by opcode and also sorts
+// instructions inside each group by special rules (see below).
+//
+// The order of instructions inside a group determines the
+// order of ytab elements inside its ytabList.
+//
+// We want these rules to be satisfied:
+//	- EVEX-encoded entries go after VEX-encoded entries.
+//	  This way, VEX forms are selected over EVEX variants.
+//	- EVEX forms with SAE/RC must go before forms without them.
+//	  This helps to avoid problems with reg-reg instructions
+//	  that encode either of them in ModRM.R/M which causes
+//	  ambiguity in ytabList (more than 1 ytab can match args).
+//	  If first matching ytab has SAE/RC, problem will not occur.
+//	- Memory argument position affects order.
+//	  Required to be in sync with XED encoder when there
+//	  are multiple choices of how to encode instruction.
+func sortGroups(ctx *context) {
+	sort.SliceStable(ctx.groups, func(i, j int) bool {
+		return ctx.groups[i].opcode < ctx.groups[j].opcode
 	})
 
-	tmpl := `// Code generated by %s. DO NOT EDIT.
-
-package x86
-
-var vexOptab = []Optab{
-%s
-}
-`
-	code := []byte(fmt.Sprintf(tmpl, progName, buf.String()))
-
-	prettyCode, err := format.Source(code)
-	if err != nil {
-		return nil, err
+	for _, g := range ctx.groups {
+		sortInstList(g.list)
 	}
-
-	_, err = w.Write(prettyCode)
-
-	return opcodes, err
 }
 
-func doGenerateAenum(goroot, output string, newNames []string) error {
-	w, err := os.Create(output + "/" + filenameAenum)
-	if err != nil {
-		return err
+func sortInstList(insts []*instruction) {
+	// Use strings for sorting to get reliable transitive "less".
+	order := make(map[*instruction]string)
+	for _, inst := range insts {
+		encTag := 'a'
+		if inst.pset.Is("EVEX") {
+			encTag = 'b'
+		}
+		memTag := 'a'
+		if index := inst.ArgIndexByZkind("reg/mem"); index != -1 {
+			memTag = 'z' - rune(index)
+		}
+		rcsaeTag := 'a'
+		if !(inst.enc.evex.SAE || inst.enc.evex.Rounding) {
+			rcsaeTag = 'b'
+		}
+		order[inst] = fmt.Sprintf("%c%c%c %s",
+			encTag, memTag, rcsaeTag, inst.YtypeListString())
 	}
-	defer w.Close()
-	r, err := os.Open(goroot + "/" + pathAenum)
-	if err != nil {
-		return err
-	}
-	defer r.Close()
 
-	return generateAenum(r, w, newNames)
+	sort.SliceStable(insts, func(i, j int) bool {
+		return order[insts[i]] < order[insts[j]]
+	})
 }
 
-func doGenerateAnames(output string) error {
-	// Runs "go generate" over previously generated aenum file.
-	path := output + "/" + filenameAenum
-	cmd := exec.Command("go", "generate", path)
-	var buf bytes.Buffer
-	cmd.Stderr = &buf
-	err := cmd.Run()
-	if err != nil {
-		return errors.New(err.Error() + ": " + buf.String())
+// addGoSuffixes splits some groups into several groups by introducing a suffix.
+// For example, ANDN group becomes ANDNL and ANDNQ (ANDN becomes empty itself).
+// Empty groups are removed.
+func addGoSuffixes(ctx *context) {
+	var opcodeSuffixMatchers map[string][]string
+	{
+		opXY := []string{"VL=0", "X", "VL=1", "Y"}
+		opXYZ := []string{"VL=0", "X", "VL=1", "Y", "VL=2", "Z"}
+		opQ := []string{"REXW=1", "Q"}
+		opLQ := []string{"REXW=0", "L", "REXW=1", "Q"}
+
+		opcodeSuffixMatchers = map[string][]string{
+			"VCVTPD2DQ":   opXY,
+			"VCVTPD2PS":   opXY,
+			"VCVTTPD2DQ":  opXY,
+			"VCVTQQ2PS":   opXY,
+			"VCVTUQQ2PS":  opXY,
+			"VCVTPD2UDQ":  opXY,
+			"VCVTTPD2UDQ": opXY,
+
+			"VFPCLASSPD": opXYZ,
+			"VFPCLASSPS": opXYZ,
+
+			"VCVTSD2SI":  opQ,
+			"VCVTTSD2SI": opQ,
+			"VCVTTSS2SI": opQ,
+			"VCVTSS2SI":  opQ,
+
+			"VCVTSD2USI":  opLQ,
+			"VCVTSS2USI":  opLQ,
+			"VCVTTSD2USI": opLQ,
+			"VCVTTSS2USI": opLQ,
+			"VCVTUSI2SD":  opLQ,
+			"VCVTUSI2SS":  opLQ,
+			"VCVTSI2SD":   opLQ,
+			"VCVTSI2SS":   opLQ,
+			"ANDN":        opLQ,
+			"BEXTR":       opLQ,
+			"BLSI":        opLQ,
+			"BLSMSK":      opLQ,
+			"BLSR":        opLQ,
+			"BZHI":        opLQ,
+			"MULX":        opLQ,
+			"PDEP":        opLQ,
+			"PEXT":        opLQ,
+			"RORX":        opLQ,
+			"SARX":        opLQ,
+			"SHLX":        opLQ,
+			"SHRX":        opLQ,
+		}
 	}
-	return nil
-}
 
-// testLineReplacer is used in uncommentedTestLine function.
-var testLineReplacer = strings.NewReplacer(
-	"//TODO: ", "",
+	newGroups := make(map[string][]*instruction)
+	for _, g := range ctx.groups {
+		kv := opcodeSuffixMatchers[g.opcode]
+		if kv == nil {
+			continue
+		}
 
-	// Fix register references.
-	"XMM", "X",
-	"YMM", "Y",
-)
-
-func uncommentedTestLine(line string) string {
-	// Sync with x86/x86test/print.go.
-	const x86testFmt = "\t%-39s // %s"
-
-	line = testLineReplacer.Replace(line)
-	i := strings.Index(line, " // ")
-	return fmt.Sprintf(x86testFmt, line[len("\t"):i], line[i+len(" // "):])
-}
-
-// stringsSet returns a map mapping each x in xs to true.
-func stringsSet(xs []string) map[string]bool {
-	set := make(map[string]bool, len(xs))
-	for _, x := range xs {
-		set[x] = true
-	}
-	return set
-}
-
-func doGenerateTests(goroot, output string, newNames []string) error {
-	testsFile, err := os.Open(goroot + "/" + pathTests)
-	if err != nil {
-		return err
-	}
-	defer testsFile.Close()
-
-	var rxCommentedTestCase = regexp.MustCompile(`//TODO: ([A-Z][A-Z0-9]+)`)
-
-	newNamesSet := stringsSet(newNames)
-
-	var buf bytes.Buffer
-	scanner := bufio.NewScanner(testsFile)
-	for scanner.Scan() {
-		line := scanner.Text()
-		m := rxCommentedTestCase.FindStringSubmatch(line)
-		if m != nil {
-			name := string(m[1])
-			if newNamesSet[name] {
-				line = uncommentedTestLine(line)
+		list := g.list[:0]
+		for _, inst := range g.list {
+			newOp := inst.opcode + inst.pset.Match(kv...)
+			if newOp != inst.opcode {
+				inst.opcode = newOp
+				newGroups[newOp] = append(newGroups[newOp], inst)
+			} else {
+				list = append(list, inst)
 			}
 		}
-		buf.WriteString(line)
-		buf.WriteByte('\n')
+		g.list = list
 	}
-
-	return ioutil.WriteFile(output+"/"+filenameTests, buf.Bytes(), 0644)
-}
-
-func doAutopatch(goroot, output string) error {
-	from := [...]string{
-		output + "/" + filenameVexOptabs,
-		output + "/" + filenameAenum,
-		output + "/" + filenameAnames,
-		output + "/" + filenameTests,
-	}
-	to := [...]string{
-		goroot + "/" + pathVexOptabs,
-		goroot + "/" + pathAenum,
-		goroot + "/" + pathAnames,
-		goroot + "/" + pathTests,
-	}
-
-	// No recovery if rename will fail.
-	// There is a warning in "autopatch" description.
-	for i := range from {
-		if err := os.Rename(from[i], to[i]); err != nil {
-			return err
+	groups := ctx.groups[:0] // Filled with non-empty groups
+	// Some groups may become empty due to opcode split.
+	for _, g := range ctx.groups {
+		if len(g.list) != 0 {
+			groups = append(groups, g)
 		}
 	}
-
-	return nil
+	for op, insts := range newGroups {
+		groups = append(groups, &instGroup{
+			opcode: op,
+			list:   insts,
+		})
+	}
+	ctx.groups = groups
 }
 
-func specRowReader(path string) (*x86csv.Reader, error) {
-	f, err := os.Open(path)
-	if err != nil {
-		return nil, err
-	}
-	return x86csv.NewReader(bufio.NewReader(f)), nil
+func printTables(ctx *context) {
+	writeTables(os.Stdout, ctx)
 }
diff --git a/x86/x86avxgen/print.go b/x86/x86avxgen/print.go
new file mode 100644
index 0000000..c17dbaf
--- /dev/null
+++ b/x86/x86avxgen/print.go
@@ -0,0 +1,116 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"bytes"
+	"go/format"
+	"io"
+	"log"
+	"sort"
+	"text/template"
+)
+
+var tablesTemplate = template.Must(template.New("avx_optabs").Parse(`
+// Code generated by x86avxgen. DO NOT EDIT.
+
+package x86
+
+// VEX instructions that come in two forms:
+//	VTHING xmm2/m128, xmmV, xmm1
+//	VTHING ymm2/m256, ymmV, ymm1
+//
+// The opcode array in the corresponding Optab entry
+// should contain the (VEX prefixes, opcode byte) pair
+// for each of the two forms.
+// For example, the entries for VPXOR are:
+//
+//	VPXOR xmm2/m128, xmmV, xmm1
+//	VEX.NDS.128.66.0F.WIG EF /r
+//
+//	VPXOR ymm2/m256, ymmV, ymm1
+//	VEX.NDS.256.66.0F.WIG EF /r
+//
+// Produce this optab entry:
+//
+//	{AVPXOR, yvex_xy3, Pavx, opBytes{vex128|vex66|vex0F|vexWIG, 0xEF, vex256|vex66|vex0F|vexWIG, 0xEF}}
+//
+// VEX requires at least 2 bytes inside opBytes:
+//	- VEX prefixes (vex-prefixed constants)
+//	- Opcode byte
+//
+// EVEX instructions extend VEX form variety:
+//	VTHING zmm2/m512, zmmV, zmm1    -- implicit K0 (merging)
+//	VTHING zmm2/m512, zmmV, K, zmm1 -- explicit K mask (can't use K0)
+//
+// EVEX requires at least 3 bytes inside opBytes:
+//	- EVEX prefixes (evex-prefixed constants); similar to VEX
+//	- Displacement multiplier info (scale / broadcast scale)
+//	- Opcode byte; similar to VEX
+//
+// Both VEX and EVEX instructions may have opdigit (opcode extension) byte
+// which follows the primary opcode byte.
+// Because it can only have value of 0-7, it is written in octal notation.
+//
+// x86.csv can be very useful for figuring out proper [E]VEX parts.
+
+{{ range .Ylists }}
+  var {{.Name}} = []ytab{
+    {{- range .Ytabs }}
+      {zcase: {{.Zcase}}, zoffset: {{.Zoffset}}, args: argList{ {{.ArgList}} }},
+    {{- end }}
+  }
+{{ end }}
+
+var avxOptab = [...]Optab{
+  {{- range .Optabs }}
+    {as: {{.Opcode}}, ytab: {{.YtabList.Name}}, prefix: Pavx, op: opBytes{
+      {{- range .OpLines }}
+        {{.}},
+      {{- end }}
+    }},
+  {{- end }}
+}
+`))
+
+// writeTables writes avx optabs file contents to w.
+func writeTables(w io.Writer, ctx *context) {
+	ylists := make([]*ytabList, 0, len(ctx.ytabLists))
+	for _, ylist := range ctx.ytabLists {
+		ylists = append(ylists, ylist)
+	}
+	sort.Slice(ylists, func(i, j int) bool {
+		return ylists[i].Name < ylists[j].Name
+	})
+	optabs := make([]*optab, 0, len(ctx.optabs))
+	for _, o := range ctx.optabs {
+		optabs = append(optabs, o)
+	}
+	sort.Slice(optabs, func(i, j int) bool {
+		return optabs[i].Opcode < optabs[j].Opcode
+	})
+
+	var buf bytes.Buffer
+	err := tablesTemplate.Execute(&buf, struct {
+		Ylists []*ytabList
+		Optabs []*optab
+	}{
+		Ylists: ylists,
+		Optabs: optabs,
+	})
+	if err != nil {
+		log.Fatalf("template execute error: %v", err)
+	}
+
+	// TODO: invoke "go fmt" or format.Gofmt? #22695.
+	prettyCode, err := format.Source(buf.Bytes())
+	if err != nil {
+		log.Fatalf("gofmt error: %v", err)
+	}
+
+	if _, err := w.Write(prettyCode); err != nil {
+		log.Fatalf("write output: %v", err)
+	}
+}
diff --git a/x86/x86avxgen/tables.go b/x86/x86avxgen/tables.go
deleted file mode 100644
index 5d80daa..0000000
--- a/x86/x86avxgen/tables.go
+++ /dev/null
@@ -1,186 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-// ytabMap maps keys generated with keyFromInsts to ytab identifiers.
-var ytabMap = map[string]ytabID{
-	"": "yvex",
-
-	// 1 form:
-	"m":           "yvex_m",
-	"y/m,x":       "yvex_y2",
-	"x/m,r":       "yvex_vcvtsd2si",
-	"m,y":         "yvex_vbroadcastf",
-	"m,x;m,y":     "yvex_mxy",
-	"x,x":         "yvex_xx2",
-	"x/m,x":       "yvex_x2",
-	"x/m,xV,x":    "yvex_x3",
-	"x,xV,x":      "yvex_xx3",
-	"y/m,yV,y":    "yvex_yy3",
-	"r/m,rV":      "yvex_r2",
-	"r/m,rV,r":    "yvex_r3",
-	"r/m,xV,x":    "yvex_rx3",
-	"rV,r/m,r":    "yvex_vmr3",
-	"i8,r/m,r":    "yvex_ri3",
-	"i8,x/m,x":    "yvex_xi3",
-	"i8,x,r/m":    "yvex_vpextr",
-	"i8,y,x/m":    "yvex_yi3",
-	"i8,y/m,y":    "yvex_vpermpd",
-	"i8,r/m,xV,x": "yvex_rxi4",
-	"i8,x/m,xV,x": "yvex_xxi4",
-	"i8,x/m,yV,y": "yvex_xyi4",
-	"i8,y/m,yV,y": "yvex_yyi4",
-
-	// 2 forms:
-	"m,y;x,y":                   "yvex_vpbroadcast_sd",
-	"i8,x,r;i8,x,r/m":           "yvex_vpextrw",
-	"i8,y,x/m;i8,x,x/m":         "yvex_vcvtps2ph",
-	"i8,x/m,x;i8,y/m,y":         "yvex_xyi3",
-	"i8,x/m,xV,x;i8,y/m,yV,y":   "yvex_vpalignr",
-	"i8,x,xV;i8,y,yV":           "yvex_shift_dq",
-	"x/m,xV,x;y/m,yV,y":         "yvex_xy3",
-	"x/m,xV,x;i8,x,xV":          "yvex_shift",
-	"x/m,x;x/m,y":               "yvex_vpbroadcast",
-	"x/m,x;y/m,y":               "yvex_xy2",
-	"x,m;y,m":                   "yvex_vmovntdq",
-	"x,r/m;r/m,x":               "yvex_vmovd",
-	"x,r;y,r":                   "yvex_xyr2",
-	"x,m;m,xV,x":                "yvex_vmovhpd",
-	"xIH,x/m,xV,x;yIH,y/m,yV,y": "yvex_xy4",
-
-	// 4 forms:
-	"m,x;x,x;m,y;x,y":                     "yvex_vpbroadcast_ss",
-	"x,m;m,x;x,xV,x;x,xV,x":               "yvex_vmov",
-	"x,xV,m;y,yV,m;m,xV,x;m,yV,y":         "yvex_vblendvpd",
-	"x/m,x;x,x/m;y/m,y;y,y/m":             "yvex_vmovdqa",
-	"x/m,xV,x;i8,x,xV;x/m,yV,y;i8,y,yV":   "yvex_vps",
-	"i8,x/m,x;x/m,xV,x;i8,y/m,y;y/m,yV,y": "yvex_vpermilp",
-
-	// 5 forms:
-	"x,r/m;m,x;r/m,x;x,x;x,x/m": "yvex_vmovq",
-}
-
-// precomputedOptabs is used to emit some optabs that can not be
-// generated with normal execution path.
-var precomputedOptabs = map[string]optab{
-	// This is added to avoid backwards-incompatible change.
-	//
-	// initially, yvex_xyi3 was added with Yi8 args.
-	// Later, it was decided to make it Yu8, but Yi8 forms
-	// were preserved as well.
-	// So, 4 ytabs instead of 2.
-	"VPSHUFD": {
-		"VPSHUFD",
-		"yvex_xyi3",
-		[]string{
-			"vexNOVSR | vex128 | vex66 | vex0F | vexWIG", "0x70",
-			"vexNOVSR | vex256 | vex66 | vex0F | vexWIG", "0x70",
-			"vexNOVSR | vex128 | vex66 | vex0F | vexWIG", "0x70",
-			"vexNOVSR | vex256 | vex66 | vex0F | vexWIG", "0x70",
-		},
-	},
-
-	// Instructions that can not be constructed from
-	// "x86.csv" because it only have 2/4 forms.
-	"VPSRLQ": {
-		"VPSRLQ",
-		"yvex_shift",
-		[]string{
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x73", "0xD0",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x73", "0xD0",
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xD3",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xD3",
-		},
-	},
-	"VPSLLQ": {
-		"VPSLLQ",
-		"yvex_shift",
-		[]string{
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x73", "0xF0",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x73", "0xF0",
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xF3",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xF3",
-		},
-	},
-	"VPSLLD": {
-		"VPSLLD",
-		"yvex_shift",
-		[]string{
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0x72", "0xF0",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0x72", "0xF0",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0xF2",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0xF2",
-		},
-	},
-	"VPSRLD": {
-		"VPSRLD",
-		"yvex_shift",
-		[]string{
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x72", "0xD0",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x72", "0xD0",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0xD2",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0xD2",
-		},
-	},
-
-	// Thease are here due to adhoc encoded
-	// ModR/M opcode extension.
-	"VPSLLDQ": {
-		"VPSLLDQ",
-		"yvex_shift_dq",
-		[]string{
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x73", "0xF8",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x73", "0xF8",
-		},
-	},
-	"VPSRLDQ": {
-		"VPSRLDQ",
-		"yvex_shift_dq",
-		[]string{
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x73", "0xD8",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x73", "0xD8",
-		},
-	},
-	"VPSLLW": {
-		"VPSLLW",
-		"yvex_vps",
-		[]string{
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xF1",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x71", "0xF0",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xF1",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x71", "0xF0",
-		},
-	},
-	"VPSRAD": {
-		"VPSRAD",
-		"yvex_vps",
-		[]string{
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xE2",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x72", "0xE0",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xE2",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x72", "0xE0",
-		},
-	},
-	"VPSRAW": {
-		"VPSRAW",
-		"yvex_vps",
-		[]string{
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xE1",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x71", "0xE0",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xE1",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x71", "0xE0",
-		},
-	},
-	"VPSRLW": {
-		"VPSRLW",
-		"yvex_vps",
-		[]string{
-			"vexNDS | vex128 | vex66 | vex0F | vexWIG", "0xD1",
-			"vexNDD | vex128 | vex66 | vex0F | vexWIG", "0x71", "0xD0",
-			"vexNDS | vex256 | vex66 | vex0F | vexWIG", "0xD1",
-			"vexNDD | vex256 | vex66 | vex0F | vexWIG", "0x71", "0xD0",
-		},
-	},
-}
diff --git a/x86/x86avxgen/testdata/golden.txt b/x86/x86avxgen/testdata/golden.txt
new file mode 100644
index 0000000..c5bb549
--- /dev/null
+++ b/x86/x86avxgen/testdata/golden.txt
@@ -0,0 +1,160 @@
+var _yvmovsd = []ytab{
+	{zcase: Zvex_r_v_rm, zoffset: 2, args: argList{Yxr, Yxr, Yxr}},
+	{zcase: Zvex_r_v_rm, zoffset: 2, args: argList{Yxr, Ym}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Ym, Yxr}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxr, Yxr, Yxr}},
+	{zcase: Zevex_r_v_rm, zoffset: 0, args: argList{YxrEvex, YxrEvex, YxrEvex}},
+	{zcase: Zevex_r_v_k_rm, zoffset: 3, args: argList{YxrEvex, YxrEvex, Yknot0, YxrEvex}},
+	{zcase: Zevex_r_v_rm, zoffset: 0, args: argList{YxrEvex, Ym}},
+	{zcase: Zevex_r_k_rm, zoffset: 3, args: argList{YxrEvex, Yknot0, Ym}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{Ym, YxrEvex}},
+	{zcase: Zevex_rm_k_r, zoffset: 3, args: argList{Ym, Yknot0, YxrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxrEvex, YxrEvex, YxrEvex}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YxrEvex, YxrEvex, Yknot0, YxrEvex}},
+}
+----
+	{as: AVMOVSD, ytab: _yvmovsd, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vexF2 | vex0F | vexW0, 0x11,
+		avxEscape | vex128 | vexF2 | vex0F | vexW0, 0x11,
+		avxEscape | vex128 | vexF2 | vex0F | vexW0, 0x10,
+		avxEscape | vex128 | vexF2 | vex0F | vexW0, 0x10,
+		avxEscape | evex128 | evexF2 | evex0F | evexW1, evexZeroingEnabled, 0x11,
+		avxEscape | evex128 | evexF2 | evex0F | evexW1, evexN8, 0x11,
+		avxEscape | evex128 | evexF2 | evex0F | evexW1, evexN8 | evexZeroingEnabled, 0x10,
+		avxEscape | evex128 | evexF2 | evex0F | evexW1, evexZeroingEnabled, 0x10,
+	}}
+
+======
+var _yvaddpd = []ytab{
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxm, Yxr, Yxr}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yym, Yyr, Yyr}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{Yzm, Yzr, Yzr}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{Yzm, Yzr, Yknot0, Yzr}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxmEvex, YxrEvex, YxrEvex}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YxmEvex, YxrEvex, Yknot0, YxrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YymEvex, YyrEvex, YyrEvex}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YymEvex, YyrEvex, Yknot0, YyrEvex}},
+}
+----
+	{as: AVADDPD, ytab: _yvaddpd, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0x58,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0x58,
+		avxEscape | evex512 | evex66 | evex0F | evexW1, evexN64 | evexBcstN8 | evexRoundingEnabled | evexZeroingEnabled, 0x58,
+		avxEscape | evex128 | evex66 | evex0F | evexW1, evexN16 | evexBcstN8 | evexZeroingEnabled, 0x58,
+		avxEscape | evex256 | evex66 | evex0F | evexW1, evexN32 | evexBcstN8 | evexZeroingEnabled, 0x58,
+	}}
+
+======
+var _yvcmppd = []ytab{
+	{zcase: Zvex_i_rm_v_r, zoffset: 2, args: argList{Yu8, Yxm, Yxr, Yxr}},
+	{zcase: Zvex_i_rm_v_r, zoffset: 2, args: argList{Yu8, Yym, Yyr, Yyr}},
+	{zcase: Zevex_i_rm_v_r, zoffset: 0, args: argList{Yu8, Yzm, Yzr, Yk}},
+	{zcase: Zevex_i_rm_v_k_r, zoffset: 3, args: argList{Yu8, Yzm, Yzr, Yknot0, Yk}},
+	{zcase: Zevex_i_rm_v_r, zoffset: 0, args: argList{Yu8, YxmEvex, YxrEvex, Yk}},
+	{zcase: Zevex_i_rm_v_k_r, zoffset: 3, args: argList{Yu8, YxmEvex, YxrEvex, Yknot0, Yk}},
+	{zcase: Zevex_i_rm_v_r, zoffset: 0, args: argList{Yu8, YymEvex, YyrEvex, Yk}},
+	{zcase: Zevex_i_rm_v_k_r, zoffset: 3, args: argList{Yu8, YymEvex, YyrEvex, Yknot0, Yk}},
+}
+----
+	{as: AVCMPPD, ytab: _yvcmppd, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0xC2,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0xC2,
+		avxEscape | evex512 | evex66 | evex0F | evexW1, evexN64 | evexBcstN8 | evexSaeEnabled, 0xC2,
+		avxEscape | evex128 | evex66 | evex0F | evexW1, evexN16 | evexBcstN8, 0xC2,
+		avxEscape | evex256 | evex66 | evex0F | evexW1, evexN32 | evexBcstN8, 0xC2,
+	}}
+
+======
+var _yvmovapd = []ytab{
+	{zcase: Zvex_r_v_rm, zoffset: 2, args: argList{Yxr, Yxm}},
+	{zcase: Zvex_r_v_rm, zoffset: 2, args: argList{Yyr, Yym}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxm, Yxr}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yym, Yyr}},
+	{zcase: Zevex_r_v_rm, zoffset: 0, args: argList{YxrEvex, YxmEvex}},
+	{zcase: Zevex_r_k_rm, zoffset: 3, args: argList{YxrEvex, Yknot0, YxmEvex}},
+	{zcase: Zevex_r_v_rm, zoffset: 0, args: argList{YyrEvex, YymEvex}},
+	{zcase: Zevex_r_k_rm, zoffset: 3, args: argList{YyrEvex, Yknot0, YymEvex}},
+	{zcase: Zevex_r_v_rm, zoffset: 0, args: argList{Yzr, Yzm}},
+	{zcase: Zevex_r_k_rm, zoffset: 3, args: argList{Yzr, Yknot0, Yzm}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxmEvex, YxrEvex}},
+	{zcase: Zevex_rm_k_r, zoffset: 3, args: argList{YxmEvex, Yknot0, YxrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YymEvex, YyrEvex}},
+	{zcase: Zevex_rm_k_r, zoffset: 3, args: argList{YymEvex, Yknot0, YyrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{Yzm, Yzr}},
+	{zcase: Zevex_rm_k_r, zoffset: 3, args: argList{Yzm, Yknot0, Yzr}},
+}
+----
+	{as: AVMOVAPD, ytab: _yvmovapd, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0x29,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0x29,
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0x28,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0x28,
+		avxEscape | evex128 | evex66 | evex0F | evexW1, evexN16 | evexZeroingEnabled, 0x29,
+		avxEscape | evex256 | evex66 | evex0F | evexW1, evexN32 | evexZeroingEnabled, 0x29,
+		avxEscape | evex512 | evex66 | evex0F | evexW1, evexN64 | evexZeroingEnabled, 0x29,
+		avxEscape | evex128 | evex66 | evex0F | evexW1, evexN16 | evexZeroingEnabled, 0x28,
+		avxEscape | evex256 | evex66 | evex0F | evexW1, evexN32 | evexZeroingEnabled, 0x28,
+		avxEscape | evex512 | evex66 | evex0F | evexW1, evexN64 | evexZeroingEnabled, 0x28,
+	}}
+
+======
+var _yvpslld = []ytab{
+	{zcase: Zvex_i_rm_vo, zoffset: 3, args: argList{Yu8, Yxr, Yxr}},
+	{zcase: Zvex_i_rm_vo, zoffset: 3, args: argList{Yu8, Yyr, Yyr}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxm, Yxr, Yxr}},
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxm, Yyr, Yyr}},
+	{zcase: Zevex_i_rm_vo, zoffset: 0, args: argList{Yu8, YxmEvex, YxrEvex}},
+	{zcase: Zevex_i_rm_k_vo, zoffset: 4, args: argList{Yu8, YxmEvex, Yknot0, YxrEvex}},
+	{zcase: Zevex_i_rm_vo, zoffset: 0, args: argList{Yu8, YymEvex, YyrEvex}},
+	{zcase: Zevex_i_rm_k_vo, zoffset: 4, args: argList{Yu8, YymEvex, Yknot0, YyrEvex}},
+	{zcase: Zevex_i_rm_vo, zoffset: 0, args: argList{Yu8, Yzm, Yzr}},
+	{zcase: Zevex_i_rm_k_vo, zoffset: 4, args: argList{Yu8, Yzm, Yknot0, Yzr}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxmEvex, YxrEvex, YxrEvex}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YxmEvex, YxrEvex, Yknot0, YxrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxmEvex, YyrEvex, YyrEvex}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YxmEvex, YyrEvex, Yknot0, YyrEvex}},
+	{zcase: Zevex_rm_v_r, zoffset: 0, args: argList{YxmEvex, Yzr, Yzr}},
+	{zcase: Zevex_rm_v_k_r, zoffset: 3, args: argList{YxmEvex, Yzr, Yknot0, Yzr}},
+}
+----
+	{as: AVPSLLW, ytab: _yvpslld, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0x71, 06,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0x71, 06,
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0xF1,
+		avxEscape | vex256 | vex66 | vex0F | vexW0, 0xF1,
+		avxEscape | evex128 | evex66 | evex0F | evexW0, evexN16 | evexZeroingEnabled, 0x71, 06,
+		avxEscape | evex256 | evex66 | evex0F | evexW0, evexN32 | evexZeroingEnabled, 0x71, 06,
+		avxEscape | evex512 | evex66 | evex0F | evexW0, evexN64 | evexZeroingEnabled, 0x71, 06,
+		avxEscape | evex128 | evex66 | evex0F | evexW0, evexN16 | evexZeroingEnabled, 0xF1,
+		avxEscape | evex256 | evex66 | evex0F | evexW0, evexN16 | evexZeroingEnabled, 0xF1,
+		avxEscape | evex512 | evex66 | evex0F | evexW0, evexN16 | evexZeroingEnabled, 0xF1,
+	}}
+
+======
+var _yvzeroall = []ytab{
+	{zcase: Zvex, zoffset: 2, args: argList{}},
+}
+----
+	{as: AVZEROALL, ytab: _yvzeroall, prefix: Pavx, op: opBytes{
+		avxEscape | vex256 | vex0F | vexW0, 0x77,
+	}}
+
+======
+var _yvzeroall = []ytab{
+	{zcase: Zvex, zoffset: 2, args: argList{}},
+}
+----
+	{as: AVZEROUPPER, ytab: _yvzeroall, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex0F | vexW0, 0x77,
+	}}
+
+======
+var _yvcomisd = []ytab{
+	{zcase: Zvex_rm_v_r, zoffset: 2, args: argList{Yxm, Yxr}},
+	{zcase: Zevex_rm_v_r, zoffset: 3, args: argList{YxmEvex, YxrEvex}},
+}
+----
+	{as: AVUCOMISD, ytab: _yvcomisd, prefix: Pavx, op: opBytes{
+		avxEscape | vex128 | vex66 | vex0F | vexW0, 0x2E,
+		avxEscape | evex128 | evex66 | evex0F | evexW1, evexN8 | evexSaeEnabled, 0x2E,
+	}}
diff --git a/x86/x86avxgen/testdata/xedpath/all-dec-instructions.txt b/x86/x86avxgen/testdata/xedpath/all-dec-instructions.txt
new file mode 100644
index 0000000..07cbc41
--- /dev/null
+++ b/x86/x86avxgen/testdata/xedpath/all-dec-instructions.txt
@@ -0,0 +1,57893 @@
+AVX_INSTRUCTIONS()::
+
+{
+ICLASS: VFMADDSUBPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x5C V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x5C V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x5C V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x5C V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x5C V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x5C V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x5C V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x5C V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFMADDSUBPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x5D V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x5D V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x5D V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x5D V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x5D V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x5D V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x5D V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x5D V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFMSUBADDPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x5E V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x5E V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x5E V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x5E V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x5E V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x5E V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x5E V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x5E V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFMSUBADDPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x5F V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x5F V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x5F V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x5F V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x5F V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x5F V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x5F V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x5F V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFMADDPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x68 V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x68 V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x68 V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x68 V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x68 V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x68 V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x68 V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x68 V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFMADDPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x69 V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x69 V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x69 V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x69 V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x69 V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x69 V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x69 V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x69 V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFMADDSS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x6A V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6A V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6A V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:d:f32
+
+PATTERN: VV1 0x6A V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:d:f32
+}
+
+{
+ICLASS: VFMADDSD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x6B V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6B V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6B V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:q:f64
+
+PATTERN: VV1 0x6B V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:q:f64
+}
+
+{
+ICLASS: VFMSUBPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x6C V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6C V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6C V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x6C V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x6C V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x6C V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x6C V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x6C V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFMSUBPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x6D V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6D V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6D V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x6D V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x6D V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x6D V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x6D V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x6D V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFMSUBSS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x6E V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6E V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x6E V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:d:f32
+
+PATTERN: VV1 0x6E V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:d:f32
+}
+
+{
+ICLASS: VFMSUBSD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x6F V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6F V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x6F V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:q:f64
+
+PATTERN: VV1 0x6F V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:q:f64
+}
+
+{
+ICLASS: VFNMADDPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x78 V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x78 V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x78 V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x78 V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x78 V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x78 V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x78 V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x78 V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFNMADDPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x79 V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x79 V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x79 V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x79 V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x79 V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x79 V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x79 V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x79 V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFNMADDSS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x7A V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7A V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7A V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:d:f32
+
+PATTERN: VV1 0x7A V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:d:f32
+}
+
+{
+ICLASS: VFNMADDSD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x7B V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7B V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7B V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:q:f64
+
+PATTERN: VV1 0x7B V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:q:f64
+}
+
+{
+ICLASS: VFNMSUBPS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x7C V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7C V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7C V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN: VV1 0x7C V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32
+
+PATTERN: VV1 0x7C V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x7C V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32
+
+PATTERN: VV1 0x7C V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN: VV1 0x7C V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32
+}
+
+{
+ICLASS: VFNMSUBPD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: MXCSR AMDONLY
+
+PATTERN: VV1 0x7D V66 W0 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7D V66 W0 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7D V66 W1 VL128  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN: VV1 0x7D V66 W1 VL128  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64
+
+PATTERN: VV1 0x7D V66 W0 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x7D V66 W0 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64
+
+PATTERN: VV1 0x7D V66 W1 VL256  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN: VV1 0x7D V66 W1 VL256  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64
+}
+
+{
+ICLASS: VFNMSUBSS
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x7E V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 REG2=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7E V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32 REG3=XMM_SE():r:dq:f32
+
+PATTERN: VV1 0x7E V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:d:f32
+
+PATTERN: VV1 0x7E V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:d:f32
+}
+
+{
+ICLASS: VFNMSUBSD
+CPL: 3
+CATEGORY: FMA4
+ISA_SET: FMA4
+EXTENSION: FMA4
+ATTRIBUTES: SIMD_SCALAR MXCSR AMDONLY
+
+PATTERN: VV1 0x7F V66 W0  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64 REG2=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7F V66 W0  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64 REG3=XMM_SE():r:dq:f64
+
+PATTERN: VV1 0x7F V66 W1  V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:q:f64
+
+PATTERN: VV1 0x7F V66 W1  V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS: REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:q:f64
+}
+
+
+###FILE: ./datafiles/amdxop/amd-vpermil2-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+AVX_INSTRUCTIONS()::
+
+
+{
+ICLASS    : VPERMIL2PS
+CPL       : 3
+CATEGORY  : XOP
+EXTENSION : XOP
+ISA_SET   : XOP
+ATTRIBUTES : AMDONLY
+
+# 128b W0
+PATTERN : VV1 0x48 VL128 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x48 VL128 V66 V0F3A W0  MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:f32 IMM0:r:b
+
+# 256b W0
+PATTERN : VV1 0x48 VL256 V66 V0F3A W0   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x48 VL256 V66 V0F3A W0   MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:f32 IMM0:r:b
+
+# 128b W1
+PATTERN : VV1 0x48 VL128 V66 V0F3A W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 MEM0:r:dq:f32  IMM0:r:b
+
+PATTERN : VV1 0x48 VL128 V66 V0F3A W1  MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_SE():r:dq:f32 REG3=XMM_B():r:dq:f32  IMM0:r:b
+
+# 256b W1
+PATTERN : VV1 0x48 VL256 V66 V0F3A W1   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32  REG2=YMM_SE():r:qq:f32  MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x48 VL256 V66 V0F3A W1   MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_SE():r:qq:f32 REG3=YMM_B():r:qq:f32  IMM0:r:b
+
+}
+
+
+
+{
+ICLASS    : VPERMIL2PD
+CPL       : 3
+CATEGORY  : XOP
+EXTENSION : XOP
+ISA_SET   : XOP
+ATTRIBUTES : AMDONLY
+
+# 128b W0
+PATTERN : VV1 0x49 VL128 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x49 VL128 V66 V0F3A W0  MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:f64 IMM0:r:b
+
+# 256b W0
+PATTERN : VV1 0x49 VL256 V66 V0F3A W0   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x49 VL256 V66 V0F3A W0   MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:f64 IMM0:r:b
+
+# 128b W1
+PATTERN : VV1 0x49 VL128 V66 V0F3A W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 MEM0:r:dq:f64  IMM0:r:b
+
+PATTERN : VV1 0x49 VL128 V66 V0F3A W1  MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_SE():r:dq:f64 REG3=XMM_B():r:dq:f64  IMM0:r:b
+
+# 256b W1
+PATTERN : VV1 0x49 VL256 V66 V0F3A W1   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64  REG2=YMM_SE():r:qq:f64  MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x49 VL256 V66 V0F3A W1   MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_SE():r:qq:f64 REG3=YMM_B():r:qq:f64  IMM0:r:b
+
+}
+
+
+
+###FILE: ./datafiles/xsaveopt/xsaveopt-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : XSAVEOPT
+CPL       : 3
+CATEGORY  : XSAVEOPT
+EXTENSION : XSAVEOPT
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT  x87_mmx_state_r NOTSX
+PATTERN   : 0x0F 0xAE MOD[mm] MOD!=3 REG[0b110] RM[nnn]  no_refining_prefix norexw_prefix MODRM()
+#FIXME 2007-06-25 need a meaningful width code for XSAVE/XSAVEOPT/XRSTOR
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+{
+ICLASS    : XSAVEOPT64
+CPL       : 3
+CATEGORY  : XSAVEOPT
+EXTENSION : XSAVEOPT
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT  x87_mmx_state_r NOTSX
+
+PATTERN   : 0x0F 0xAE MOD[mm] MOD!=3 REG[0b110] RM[nnn] no_refining_prefix rexw_prefix MODRM()
+#FIXME 2007-06-25 need a meaningful width code for XSAVE/XSAVEOPT/XRSTOR
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+
+###FILE: ./datafiles/mpx/mpx-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+
+INSTRUCTIONS()::
+
+
+UDELETE: NOP0F1A
+UDELETE: NOP0F1B
+
+
+
+{
+ICLASS: BNDMK
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES: NO_RIP_REL
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  f3_refining_prefix
+OPERANDS: REG0=BND_R():w  AGEN:r
+}
+
+
+
+
+{
+ICLASS: BNDCL
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES: EXCEPTION_BR
+COMMENT: 67 prefixes will be misinterpreted on MPX NI. XED cannot ignore them.
+PATTERN:  0x0F 0x1A MPXMODE=1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  f3_refining_prefix
+OPERANDS: REG0=BND_R():r AGEN:r
+
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]   f3_refining_prefix  mode64
+OPERANDS: REG0=BND_R():r REG1=GPR64_B():r
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]   f3_refining_prefix  not64
+OPERANDS: REG0=BND_R():r REG1=GPR32_B():r
+}
+
+{
+ICLASS: BNDCU
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES: EXCEPTION_BR
+COMMENT: 67 prefixes will be misinterpreted on MPX NI. XED cannot ignore them.
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  f2_refining_prefix
+OPERANDS: REG0=BND_R():r AGEN:r
+
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]   f2_refining_prefix  mode64
+OPERANDS: REG0=BND_R():r REG1=GPR64_B():r
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]   f2_refining_prefix  not64
+OPERANDS: REG0=BND_R():r REG1=GPR32_B():r
+}
+
+{
+ICLASS: BNDCN
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES:  EXCEPTION_BR
+COMMENT: 67 prefixes will be misinterpreted on MPX NI. XED cannot ignore them.
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() f2_refining_prefix
+OPERANDS: REG0=BND_R():r AGEN:r
+
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]  f2_refining_prefix  mode64
+OPERANDS: REG0=BND_R():r REG1=GPR64_B():r
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD=3 REG[rrr] RM[nnn]  f2_refining_prefix  not64
+OPERANDS: REG0=BND_R():r REG1=GPR32_B():r
+
+}
+
+{
+ICLASS: BNDMOV
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES:
+COMMENT: load form
+
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]  osz_refining_prefix REFINING66()
+OPERANDS: REG0=BND_R():w REG1=BND_B():r
+
+# 16b refs 64b memop (2x32b) but only if EASZ=32b!
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  osz_refining_prefix REFINING66() mode16 eamode32
+OPERANDS: REG0=BND_R():w MEM0:r:q:u32
+
+# 32b refs 64b memop (2x32b)
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  osz_refining_prefix REFINING66() mode32 eamode32
+OPERANDS: REG0=BND_R():w MEM0:r:q:u32
+
+# 64b refs 128b memop (2x64b)
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  osz_refining_prefix REFINING66() mode64
+OPERANDS: REG0=BND_R():w MEM0:r:dq:u64
+
+
+
+}
+
+{
+ICLASS: BNDMOV
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES:
+COMMENT: store form
+
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn] osz_refining_prefix REFINING66()
+OPERANDS: REG0=BND_B():w   REG1=BND_R():r
+
+# 16b refs 64b memop (2x32b) but only if EASZ=32b!
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix REFINING66() mode16 eamode32
+OPERANDS: MEM0:w:q:u32 REG0=BND_R():r
+
+# 32b refs 64b memop (2x32b)
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  osz_refining_prefix REFINING66() mode32
+OPERANDS: MEM0:w:q:u32 REG0=BND_R():r
+
+# 64b refs 128b memop (2x64b)
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  osz_refining_prefix REFINING66() mode64
+OPERANDS: MEM0:w:dq:u64 REG0=BND_R():r
+}
+
+
+{
+ICLASS: BNDLDX
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES:  EXCEPTION_BR SPECIAL_AGEN_REQUIRED INDEX_REG_IS_POINTER NO_RIP_REL
+COMMENT:  RIP (mode64, easz64, MOD=0, RM=5) mode disallowed in 64b And 16/32b is easize32 only
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix not64 eamode32
+OPERANDS: REG0=BND_R():w MEM0:r:bnd32
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=0 REG[rrr] RM[nnn]   MODRM()  no_refining_prefix mode64  # RM!=5
+OPERANDS: REG0=BND_R():w MEM0:r:bnd64
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=1 REG[rrr] RM[nnn]   MODRM()  no_refining_prefix mode64
+OPERANDS: REG0=BND_R():w MEM0:r:bnd64
+PATTERN:  0x0F 0x1A MPXMODE=1 MOD[mm] MOD=2 REG[rrr] RM[nnn]   MODRM()  no_refining_prefix mode64
+OPERANDS: REG0=BND_R():w MEM0:r:bnd64
+}
+
+{
+ICLASS: BNDSTX
+EXTENSION: MPX
+CATEGORY:  MPX
+ISA_SET:   MPX
+ATTRIBUTES:  EXCEPTION_BR SPECIAL_AGEN_REQUIRED INDEX_REG_IS_POINTER NO_RIP_REL
+COMMENT:  RIP (mode64, easz64, MOD=0, RM=5) mode disallowed in 64b And 16/32b is easize32 only
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  no_refining_prefix not64 eamode32
+OPERANDS: MEM0:w:bnd32 REG0=BND_R():r
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD=0 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix mode64 # RM!=5
+OPERANDS: MEM0:w:bnd64 REG0=BND_R():r
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD=1 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix mode64
+OPERANDS: MEM0:w:bnd64 REG0=BND_R():r
+PATTERN:  0x0F 0x1B MPXMODE=1 MOD[mm] MOD=2 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix mode64
+OPERANDS: MEM0:w:bnd64 REG0=BND_R():r
+}
+
+{
+ICLASS    : NOP
+CPL       : 3
+CATEGORY  : WIDENOP
+ATTRIBUTES: NOP
+EXTENSION : BASE
+ISA_SET   : PPRO
+COMMENT   : MPXMODE=1: some of the reg/reg forms of these NOPs are still NOPs.
+
+PATTERN   : 0x0F 0x1A MPXMODE=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn] no_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1A
+
+PATTERN   : 0x0F 0x1B MPXMODE=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn] no_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1B
+
+PATTERN   : 0x0F 0x1B MPXMODE=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1B
+}
+
+
+{
+ICLASS    : NOP
+CPL       : 3
+CATEGORY  : WIDENOP
+ATTRIBUTES: NOP
+EXTENSION : BASE
+ISA_SET   : PPRO
+COMMENT   : For MPXMODE=0 operation
+
+PATTERN   : 0x0F 0x1A MPXMODE=0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1A
+
+PATTERN   : 0x0F 0x1B MPXMODE=0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1B
+
+PATTERN   : 0x0F 0x1A MPXMODE=0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_B():r MEM0:r:v
+IFORM     : NOP_GPRv_MEMv_0F1A
+
+PATTERN   : 0x0F 0x1B MPXMODE=0 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()
+OPERANDS  : REG0=GPRv_B():r MEM0:r:v
+IFORM     : NOP_GPRv_MEM_0F1B
+}
+
+
+
+
+###FILE: ./datafiles/cet/cet-nop-remove.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+
+INSTRUCTIONS()::
+
+UDELETE: NOP0F1E
+
+{
+ICLASS    : NOP
+#UNAME     : NOP0F1E
+CPL       : 3
+CATEGORY  : WIDENOP
+EXTENSION : BASE
+ATTRIBUTES: NOP
+ISA_SET   : PPRO
+COMMENT   : reg form MODRM.MOD=3 & MODRM.REG=0b001  f3 prefix is RDSSP{D,Q}
+
+# mem forms
+
+PATTERN   : 0x0F 0x1E MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:r:v REG0=GPRv_R():r
+IFORM     : NOP_MEMv_GPRv_0F1E
+
+
+# reg forms
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[rrr] RM[nnn] no_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[rrr] RM[nnn] f2_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[rrr] RM[nnn] osz_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+
+
+
+
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b000] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+# ...
+# F3 with MODRM.REG=0b001 is for CET for all values of RM.
+# ...
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b010] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b011] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b100] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b101] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b110] RM[nnn] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b000] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b001] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+# ...
+# F3 with MODRM.REG=0b111  with RM=2 or RM=3 is for CET
+# ...
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b100] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b101] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b110] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3 REG[0b111] RM[0b111] f3_refining_prefix
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+
+}
+
+
+# REPLACE CERTAIN NOPS WITH MODAL OPTIONS  based on CET=0/1
+{
+ICLASS    : NOP
+#UNAME     : NOP0F1E
+CPL       : 3
+CATEGORY  : WIDENOP
+EXTENSION : BASE
+ATTRIBUTES: NOP
+ISA_SET   : PPRO
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3  REG[0b111] RM[0b010]  f3_refining_prefix CET=0
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3  REG[0b111] RM[0b011]  f3_refining_prefix CET=0
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+}
+
+
+{
+ICLASS    : NOP
+#UNAME     : NOP0F1E
+CPL       : 3
+CATEGORY  : WIDENOP
+EXTENSION : BASE
+ATTRIBUTES: NOP
+ISA_SET   : PPRO
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3  REG[0b001] RM[nnn]  f3_refining_prefix W0 CET=0
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+
+PATTERN   : 0x0F 0x1E MOD[0b11] MOD=3  REG[0b001] RM[nnn]  f3_refining_prefix W1 mode64  CET=0
+OPERANDS  : REG0=GPRv_B():r REG1=GPRv_R():r
+IFORM     : NOP_GPRv_GPRv_0F1E
+}
+
+
+###FILE: ./datafiles/cet/cet-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+INSTRUCTIONS()::
+# EMITTING CLRSSBSY (CLRSSBSY-N/A-1)
+{
+ICLASS:      CLRSSBSY
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0xAE MOD[mm] MOD!=3 REG[0b110] RM[nnn]   f3_refining_prefix     MODRM()
+OPERANDS:    MEM0:w:q:u64
+IFORM:       CLRSSBSY_MEMu64
+}
+
+
+# EMITTING ENDBR32 (ENDBR32-N/A-1)
+{
+ICLASS:      ENDBR32
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x1E MOD[0b11] MOD=3  REG[0b111] RM[0b011]  f3_refining_prefix     CET=1
+OPERANDS:
+IFORM:       ENDBR32
+}
+
+
+# EMITTING ENDBR64 (ENDBR64-N/A-1)
+{
+ICLASS:      ENDBR64
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x1E MOD[0b11] MOD=3  REG[0b111] RM[0b010]  f3_refining_prefix     CET=1
+OPERANDS:
+IFORM:       ENDBR64
+}
+
+
+# EMITTING INCSSPD (INCSSPD-N/A-1)
+{
+ICLASS:      INCSSPD
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0xAE MOD[0b11] MOD=3  REG[0b101] RM[nnn]  f3_refining_prefix    W0
+OPERANDS:    REG0=GPR32_B():r:d:u8 REG1=XED_REG_SSP:rw:SUPP:u64
+IFORM:       INCSSPD_GPR32u8
+}
+
+
+# EMITTING INCSSPQ (INCSSPQ-N/A-1)
+{
+ICLASS:      INCSSPQ
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0xAE MOD[0b11] MOD=3  REG[0b101] RM[nnn]  f3_refining_prefix    W1  mode64
+OPERANDS:    REG0=GPR64_B():r:q:u8 REG1=XED_REG_SSP:rw:SUPP:u64
+IFORM:       INCSSPQ_GPR64u8
+}
+
+
+# EMITTING RDSSPD (RDSSPD-N/A-1)
+{
+ICLASS:      RDSSPD
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x1E MOD[0b11] MOD=3  REG[0b001] RM[nnn]  f3_refining_prefix    W0 CET=1
+OPERANDS:    REG0=GPR32_B():w:d:u32 REG1=XED_REG_SSP:r:SUPP:u64
+IFORM:       RDSSPD_GPR32u32
+}
+
+
+# EMITTING RDSSPQ (RDSSPQ-N/A-1)
+{
+ICLASS:      RDSSPQ
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x1E MOD[0b11] MOD=3  REG[0b001] RM[nnn]  f3_refining_prefix    W1  mode64 CET=1
+OPERANDS:    REG0=GPR64_B():w:q:u64 REG1=XED_REG_SSP:r:SUPP:u64
+IFORM:       RDSSPQ_GPR64u64
+}
+
+
+# EMITTING RSTORSSP (RSTORSSP-N/A-1)
+{
+ICLASS:      RSTORSSP
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x01 MOD[mm] MOD!=3 REG[0b101] RM[nnn]  MODRM()  f3_refining_prefix
+OPERANDS:    MEM0:rw:q:u64 REG0=XED_REG_SSP:w:SUPP:u64
+IFORM:       RSTORSSP_MEMu64
+}
+
+
+# EMITTING SAVESSP (SAVESSP-N/A-1)
+{
+ICLASS:      SAVESSP
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x01 MOD[0b11] MOD=3  REG[0b101] RM[0b010]  f3_refining_prefix
+OPERANDS:    REG0=XED_REG_SSP:r:SUPP:u64
+IFORM:       SAVESSP
+}
+
+
+# EMITTING SETSSBSY (SETSSBSY-N/A-1)
+{
+ICLASS:      SETSSBSY
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x01 MOD[0b11] MOD=3  REG[0b101] RM[0b000]  f3_refining_prefix
+OPERANDS:
+IFORM:       SETSSBSY
+}
+
+
+# EMITTING WRSSD (WRSSD-N/A-1)
+{
+ICLASS:      WRSSD
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x38 0xF6 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix    W0
+OPERANDS:    MEM0:w:d:u32 REG0=GPR32_R():r:d:u32
+IFORM:       WRSSD_MEMu32_GPR32u32
+}
+
+
+# EMITTING WRSSQ (WRSSQ-N/A-1)
+{
+ICLASS:      WRSSQ
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x38 0xF6 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix    W1  mode64
+OPERANDS:    MEM0:w:q:u64 REG0=GPR64_R():r:q:u64
+IFORM:       WRSSQ_MEMu64_GPR64u64
+}
+
+
+# EMITTING WRUSSD (WRUSSD-N/A-1)
+{
+ICLASS:      WRUSSD
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x38 0xF5 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix    W0
+OPERANDS:    MEM0:w:d:u32 REG0=GPR32_R():r:d:u32
+IFORM:       WRUSSD_MEMu32_GPR32u32
+}
+
+
+# EMITTING WRUSSQ (WRUSSQ-N/A-1)
+{
+ICLASS:      WRUSSQ
+CPL:         3
+CATEGORY:    CET
+EXTENSION:   CET
+ISA_SET:     CET
+REAL_OPCODE: Y
+PATTERN:    0x0F 0x38 0xF5 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix    W1  mode64
+OPERANDS:    MEM0:w:q:u64 REG0=GPR64_R():r:q:u64
+IFORM:       WRUSSQ_MEMu64_GPR64u64
+}
+
+
+
+
+###FILE: ./datafiles/sha/sha-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+INSTRUCTIONS()::
+# EMITTING SHA1MSG1 (SHA1MSG1-N/A-1)
+{
+ICLASS:      SHA1MSG1
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xC9 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32
+IFORM:       SHA1MSG1_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA1MSG1
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xC9 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32
+IFORM:       SHA1MSG1_XMMi32_MEMi32_SHA
+}
+
+
+# EMITTING SHA1MSG2 (SHA1MSG2-N/A-1)
+{
+ICLASS:      SHA1MSG2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCA MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32
+IFORM:       SHA1MSG2_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA1MSG2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xCA MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32
+IFORM:       SHA1MSG2_XMMi32_MEMi32_SHA
+}
+
+
+# EMITTING SHA1NEXTE (SHA1NEXTE-N/A-1)
+{
+ICLASS:      SHA1NEXTE
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xC8 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32
+IFORM:       SHA1NEXTE_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA1NEXTE
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xC8 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32
+IFORM:       SHA1NEXTE_XMMi32_MEMi32_SHA
+}
+
+
+# EMITTING SHA1RNDS4 (SHA1RNDS4-N/A-1)
+{
+ICLASS:      SHA1RNDS4
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x3A 0xCC MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32 IMM0:r:b
+IFORM:       SHA1RNDS4_XMMi32_XMMi32_IMM8_SHA
+}
+
+{
+ICLASS:      SHA1RNDS4
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x3A 0xCC MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32 IMM0:r:b
+IFORM:       SHA1RNDS4_XMMi32_MEMi32_IMM8_SHA
+}
+
+
+# EMITTING SHA256MSG1 (SHA256MSG1-N/A-1)
+{
+ICLASS:      SHA256MSG1
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCC MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32
+IFORM:       SHA256MSG1_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA256MSG1
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xCC MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32
+IFORM:       SHA256MSG1_XMMi32_MEMi32_SHA
+}
+
+
+# EMITTING SHA256MSG2 (SHA256MSG2-N/A-1)
+{
+ICLASS:      SHA256MSG2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCD MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32
+IFORM:       SHA256MSG2_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA256MSG2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xCD MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32
+IFORM:       SHA256MSG2_XMMi32_MEMi32_SHA
+}
+
+
+# EMITTING SHA256RNDS2 (SHA256RNDS2-N/A-1)
+{
+ICLASS:      SHA256RNDS2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCB MOD[0b11] MOD=3  REG[rrr] RM[nnn]  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 REG1=XMM_B():r:dq:i32 REG2=XED_REG_XMM0:r:SUPP:dq:u8
+IFORM:       SHA256RNDS2_XMMi32_XMMi32_SHA
+}
+
+{
+ICLASS:      SHA256RNDS2
+CPL:         3
+CATEGORY:    SHA
+EXTENSION:   SHA
+ISA_SET:     SHA
+EXCEPTIONS:     SSE_TYPE_4
+REAL_OPCODE: Y
+ATTRIBUTES:  REQUIRES_ALIGNMENT
+PATTERN:     0x0F 0x38 0xCB MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  no_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:i32 MEM0:r:dq:i32 REG1=XED_REG_XMM0:r:SUPP:dq:u8
+IFORM:       SHA256RNDS2_XMMi32_MEMi32_SHA
+}
+
+
+
+
+###FILE: ./datafiles/ivbint/ivb-int-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : RDRAND
+CPL       : 3
+CATEGORY  : RDRAND
+EXTENSION : RDRAND
+ISA_SET   : RDRAND
+FLAGS     : MUST [ cf-mod zf-0 of-0 af-0 pf-0 sf-0 ]
+PATTERN   : 0x0F 0xC7  MOD[0b11] MOD=3 REG[0b110] RM[nnn] not_refining
+OPERANDS  : REG0=GPRv_B():w
+}
+
+
+
+###FILE: ./datafiles/ivbint/fsgsbase-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+
+{
+ICLASS    : RDFSBASE
+CPL       : 3
+CATEGORY  : RDWRFSGS
+EXTENSION : RDWRFSGS
+
+PATTERN   : 0x0F 0xAE MOD[0b11] MOD=3 REG[0b000] RM[nnn] mode64 f3_refining_prefix
+OPERANDS  : REG0=GPRy_B():w  REG1=XED_REG_FSBASE:r:SUPP:y
+
+}
+{
+ICLASS    : RDGSBASE
+CPL       : 3
+CATEGORY  : RDWRFSGS
+EXTENSION : RDWRFSGS
+
+PATTERN   : 0x0F 0xAE MOD[0b11] MOD=3 REG[0b001] RM[nnn] mode64 f3_refining_prefix
+OPERANDS  : REG0=GPRy_B():w  REG1=XED_REG_GSBASE:r:SUPP:y
+
+}
+
+
+
+{
+ICLASS    : WRFSBASE
+CPL       : 3
+CATEGORY  : RDWRFSGS
+EXTENSION : RDWRFSGS
+ATTRIBUTES: NOTSX
+
+PATTERN   : 0x0F 0xAE MOD[0b11] MOD=3 REG[0b010] RM[nnn] mode64 f3_refining_prefix
+OPERANDS  :   REG0=GPRy_B():r   REG1=XED_REG_FSBASE:w:SUPP:y
+
+}
+{
+ICLASS    : WRGSBASE
+CPL       : 3
+CATEGORY  : RDWRFSGS
+EXTENSION : RDWRFSGS
+ATTRIBUTES: NOTSX
+
+PATTERN   : 0x0F 0xAE MOD[0b11] MOD=3 REG[0b011] RM[nnn] mode64 f3_refining_prefix
+OPERANDS  :   REG0=GPRy_B():r   REG1=XED_REG_GSBASE:w:SUPP:y
+
+}
+
+
+###FILE: ./datafiles/xsaves/xsaves-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : XSAVES
+CPL       : 0
+CATEGORY  : XSAVE
+EXTENSION : XSAVES
+COMMENT   : variable length load and conditional reg write
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT x87_mmx_state_r NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b101] RM[nnn] MODRM() norexw_prefix no_refining_prefix
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+{
+ICLASS    : XSAVES64
+CPL       : 0
+CATEGORY  : XSAVE
+EXTENSION : XSAVES
+COMMENT   : variable length load and conditional reg write
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT x87_mmx_state_r NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b101] RM[nnn] MODRM() rexw_prefix no_refining_prefix
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+
+
+
+{
+ICLASS    : XRSTORS
+CPL       : 0
+CATEGORY  : XSAVE
+EXTENSION : XSAVES
+COMMENT   : variable length load and conditional reg write
+ATTRIBUTES : xmm_state_w REQUIRES_ALIGNMENT x87_mmx_state_w NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b011] RM[nnn] MODRM() norexw_prefix no_refining_prefix
+OPERANDS  : MEM0:r:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+{
+ICLASS    : XRSTORS64
+CPL       : 0
+CATEGORY  : XSAVE
+EXTENSION : XSAVES
+COMMENT   : variable length load and conditional reg write
+ATTRIBUTES : xmm_state_w REQUIRES_ALIGNMENT x87_mmx_state_w NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b011] RM[nnn] MODRM() rexw_prefix no_refining_prefix
+OPERANDS  : MEM0:r:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+
+###FILE: ./datafiles/xsavec/xsavec-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : XSAVEC
+CPL       : 3
+CATEGORY  : XSAVE
+EXTENSION : XSAVEC
+COMMENT   : variable length store
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT x87_mmx_state_r NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b100] RM[nnn] MODRM() norexw_prefix no_refining_prefix
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+
+{
+ICLASS    : XSAVEC64
+CPL       : 3
+CATEGORY  : XSAVE
+EXTENSION : XSAVEC
+COMMENT   : variable length store
+ATTRIBUTES : xmm_state_r REQUIRES_ALIGNMENT x87_mmx_state_r  NOTSX SPECIAL_AGEN_REQUIRED
+PATTERN   : 0x0F 0xC7 MOD[mm]  MOD!=3 REG[0b100] RM[nnn] MODRM() rexw_prefix no_refining_prefix
+OPERANDS  : MEM0:w:mxsave REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_XCR0:r:SUPP
+}
+
+
+
+
+###FILE: ./datafiles/avx/avx-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+# The neat thing is we can just end a nonterminal by starting a new one.
+
+AVX_INSTRUCTIONS()::
+{
+ICLASS    : VADDPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x58  V66 VL128 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x58  V66 VL128 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x58  V66 VL256 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x58  V66 VL256 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VADDPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x58  VNP VL128 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x58  VNP VL128 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x58  VNP VL256 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x58  VNP VL256 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+{
+ICLASS    : VADDSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+ATTRIBUTES : simd_scalar MXCSR
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x58  VF2  V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x58  VF2  V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VADDSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+ATTRIBUTES : simd_scalar MXCSR
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x58  VF3  V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x58  VF3  V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VADDSUBPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xD0  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0xD0  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0xD0  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0xD0  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+{
+ICLASS    : VADDSUBPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xD0  VL128 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0xD0  VL128 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0xD0  VL256 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0xD0  VL256 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+{
+ICLASS    : VANDPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x54  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x54  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+
+PATTERN : VV1 0x54  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x54  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+
+
+{
+ICLASS    : VANDPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x54  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x54  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x54  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x54  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+}
+
+
+{
+ICLASS    : VANDNPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x55  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x55  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+
+PATTERN : VV1 0x55  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x55  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+
+
+{
+ICLASS    : VANDNPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x55  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x55  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x55  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x55  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+}
+
+
+
+{
+ICLASS    : VBLENDPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0D  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x0D  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x0D  VL256 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x0D  VL256 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 IMM0:r:b
+}
+
+
+{
+ICLASS    : VBLENDPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0C  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x0C  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x0C  VL256 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x0C  VL256 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 IMM0:r:b
+}
+
+
+
+
+
+
+{
+ICLASS    : VCMPPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xC2  V66 VL128 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC2  V66 VL128 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC2  V66 VL256 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC2  V66 VL256 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VCMPPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xC2  VNP VL128 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC2  VNP VL128 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC2  VNP VL256 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC2  VNP VL256 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VCMPSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+PATTERN : VV1 0xC2   VF2 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+
+PATTERN : VV1 0xC2   VF2 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VCMPSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0xC2   VF3 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+
+PATTERN : VV1 0xC2   VF3 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32 IMM0:r:b
+}
+
+
+{
+ICLASS    : VCOMISD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+FLAGS     : MUST [ zf-mod pf-mod cf-mod of-0 af-0 sf-0 ]
+PATTERN : VV1 0x2F   V66 V0F  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:q:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x2F   V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:q:f64 REG1=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VCOMISS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+FLAGS     : MUST [ zf-mod pf-mod cf-mod of-0 af-0 sf-0 ]
+PATTERN : VV1 0x2F   VNP V0F  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:d:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x2F   VNP V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:d:f32 REG1=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VCVTDQ2PD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xE6  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 MEM0:r:q:i32
+
+PATTERN : VV1 0xE6  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_B():r:q:i32
+
+PATTERN : VV1 0xE6  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:dq:i32
+
+PATTERN : VV1 0xE6  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VCVTDQ2PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5B  VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x5B  VL128 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:i32
+
+PATTERN : VV1 0x5B  VL256 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x5B  VL256 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VCVTPD2DQ
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xE6  VL128 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:dq:f64
+
+PATTERN : VV1 0xE6  VL128 VF2 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_B():r:dq:f64
+
+PATTERN : VV1 0xE6  VL256 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:qq:f64
+
+PATTERN : VV1 0xE6  VL256 VF2 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VCVTTPD2DQ
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0xE6  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:dq:f64
+
+PATTERN : VV1 0xE6  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_B():r:dq:f64
+
+PATTERN : VV1 0xE6  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:qq:f64
+
+PATTERN : VV1 0xE6  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VCVTPD2PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5A  V66 VL128 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:dq:f64
+
+PATTERN : VV1 0x5A  V66 VL128 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x5A  V66 VL256 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:qq:f64
+
+PATTERN : VV1 0x5A  V66 VL256 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=YMM_B():r:qq:f64
+}
+
+{
+ICLASS    : VCVTPS2DQ
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5B  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5B  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5B  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5B  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_B():r:qq:f32
+}
+
+{
+ICLASS    : VCVTTPS2DQ
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5B  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5B  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5B  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5B  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_B():r:qq:f32
+}
+
+{
+ICLASS    : VCVTPS2PD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5A  VNP VL128 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 MEM0:r:q:f32
+
+PATTERN : VV1 0x5A  VNP VL128 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_B():r:q:f32
+
+PATTERN : VV1 0x5A  VNP VL256 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5A  VNP VL256 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=XMM_B():r:dq:f32
+}
+
+
+
+
+{
+ICLASS    : VCVTSD2SI
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+COMMENT   : SNB/IVB/HSW require VEX.L=128. Later processors are LIG
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:q:f64
+
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:q:f64
+
+
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR64_R():w:q:i64 MEM0:r:q:f64
+
+PATTERN : VV1 0x2D   VF2 V0F  NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR64_R():w:q:i64 REG1=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VCVTTSD2SI
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+COMMENT   : SNB/IVB/HSW require VEX.L=128. Later processors are LIG
+
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:q:f64
+
+
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:q:f64
+
+
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR64_R():w:q:i64 MEM0:r:q:f64
+
+PATTERN : VV1 0x2C   VF2 V0F  NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR64_R():w:q:i64 REG1=XMM_B():r:q:f64
+}
+
+
+
+
+{
+ICLASS    : VCVTSS2SI
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+COMMENT   : SNB/IVB/HSW require VEX.L=128. Later processors are LIG
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:d:f32
+
+
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:d:f32
+
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR64_R():w:q:i64 MEM0:r:d:f32
+
+PATTERN : VV1 0x2D   VF3 V0F  NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR64_R():w:q:i64 REG1=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VCVTTSS2SI
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+COMMENT   : SNB/IVB/HSW require VEX.L=128. Later processors are LIG
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:d:f32
+
+
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:i32 REG1=XMM_B():r:d:f32
+
+
+
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPR64_R():w:q:i64 MEM0:r:d:f32
+
+PATTERN : VV1 0x2C   VF3 V0F  NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR64_R():w:q:i64 REG1=XMM_B():r:d:f32
+}
+
+
+
+
+{
+ICLASS    : VCVTSD2SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5A  VF2 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:q:f64
+
+PATTERN : VV1 0x5A  VF2 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:q:f64
+
+}
+
+
+{
+ICLASS    : VCVTSI2SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x2A  VF2 V0F not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:d:i32
+
+PATTERN : VV1 0x2A  VF2 V0F not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=GPR32_B():r:d:i32
+
+
+
+PATTERN : VV1 0x2A  VF2 V0F mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:d:i32
+
+PATTERN : VV1 0x2A  VF2 V0F mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=GPR32_B():r:d:i32
+
+
+
+PATTERN : VV1 0x2A  VF2 V0F mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:i64
+
+PATTERN : VV1 0x2A  VF2 V0F mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=GPR64_B():r:q:i64
+}
+
+
+{
+ICLASS    : VCVTSI2SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x2A   VF3 V0F not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:i32
+
+PATTERN : VV1 0x2A   VF3 V0F not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=GPR32_B():r:d:i32
+
+
+
+PATTERN : VV1 0x2A   VF3 V0F mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:i32
+
+PATTERN : VV1 0x2A   VF3 V0F mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=GPR32_B():r:d:i32
+
+
+
+PATTERN : VV1 0x2A   VF3 V0F mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:q:i64
+
+PATTERN : VV1 0x2A   VF3 V0F mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=GPR64_B():r:q:i64
+}
+
+
+{
+ICLASS    : VCVTSS2SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5A  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:d:f32
+
+PATTERN : VV1 0x5A  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VDIVPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5E  V66 V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x5E  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x5E  V66 V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x5E  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VDIVPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5E  VNP V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5E  VNP V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5E  VNP V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5E  VNP V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VDIVSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5E  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x5E  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VDIVSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5E  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x5E  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VEXTRACTF128
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x19  norexw_prefix VL256 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:dq:f64 REG0=YMM_R():r:dq:f64  IMM0:r:b
+
+PATTERN : VV1 0x19  norexw_prefix VL256 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_B():w:dq:f64 REG1=YMM_R():r:dq:f64 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VDPPD
+EXCEPTIONS: avx-type-2D
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x41  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x41  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 IMM0:r:b
+}
+
+{
+ICLASS    : VDPPS
+EXCEPTIONS: avx-type-2D
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x40  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x40  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x40  VL256 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x40  VL256 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 IMM0:r:b
+}
+
+
+{
+ICLASS    : VEXTRACTPS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x17  VL128 V66 V0F3A  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:d:f32  REG0=XMM_R():r:dq:f32  IMM0:r:b
+
+PATTERN : VV1 0x17  VL128 V66 V0F3A  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_B():w  REG1=XMM_R():r:dq:f32  IMM0:r:b
+}
+
+
+{
+ICLASS    : VZEROALL
+EXCEPTIONS: avx-type-8
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : xmm_state_w
+
+PATTERN : VV1 0x77 VNP  V0F VL256  NOVSR
+OPERANDS:
+
+}
+
+# FIXME: how to denote partial upper clobber!
+{
+ICLASS    : VZEROUPPER
+EXCEPTIONS: avx-type-8
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : xmm_state_w NOTSX  # FIXME: should be ymm_state_w?
+
+PATTERN : VV1 0x77 VNP  V0F VL128 NOVSR
+OPERANDS:
+}
+
+
+{
+ICLASS    : VHADDPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x7C  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x7C  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x7C  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x7C  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VHADDPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x7C  VL128 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x7C  VL128 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x7C  VL256 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x7C  VL256 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+{
+ICLASS    : VHSUBPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x7D  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x7D  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x7D  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x7D  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VHSUBPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x7D  VL128 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x7D  VL128 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x7D  VL256 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x7D  VL256 VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VPERMILPD
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+# 2008-02-01 moved norexw_prefix to after V0F38 to avoid graph build conflict with VBLENDPD
+PATTERN : VV1 0x0D VL128 V66 V0F38 norexw_prefix  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x0D  VL128 V66 V0F38 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:u64
+
+PATTERN : VV1 0x0D  VL256 V66 V0F38 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x0D  VL256 V66 V0F38 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:u64
+
+########################################
+# IMMEDIATE FORM
+########################################
+
+# 2008-02-01 moved norexw_prefix to after V0F3A to avoid a graph build conflict with VPHSUBW
+PATTERN : VV1 0x05  VL128 V66 V0F3A norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x05  VL128 V66 V0F3A norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_B():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x05  VL256 V66 V0F3A norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x05  VL256 V66 V0F3A norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_B():r:qq:f64 IMM0:r:b
+}
+
+
+{
+ICLASS    : VPERMILPS
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+# moved norexw_prefix to after V0F38 to avoid graph build conflict with VBLENDPS
+PATTERN : VV1 0x0C VL128 V66 V0F38 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x0C  VL128 V66 V0F38 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:u32
+
+PATTERN : VV1 0x0C  VL256 V66 V0F38  norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x0C  VL256 V66 V0F38  norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:u32
+
+########################################
+# IMMEDIATE FORM
+########################################
+
+# 2008-02-01: moved norexw_prefix after V0F3A due to graph-build collision with VPMADDUBSW
+PATTERN : VV1 0x04 VL128 V66 V0F3A norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x04 VL128 V66 V0F3A norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x04 VL256 V66 V0F3A norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x04 VL256 V66 V0F3A norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_B():r:qq:f32 IMM0:r:b
+}
+
+
+{
+ICLASS    : VPERM2F128
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+
+# 2008-02-01 moved norexw_prefix to after V0F3A to avoid conflict with VPHSUBD
+PATTERN : VV1 0x06 VL256 V66 V0F3A norexw_prefix  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x06 VL256 V66 V0F3A norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VBROADCASTSS
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX
+PATTERN : VV1 0x18  norexw_prefix VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:d:f32 EMX_BROADCAST_1TO4_32
+
+PATTERN : VV1 0x18  norexw_prefix VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:d:f32 EMX_BROADCAST_1TO8_32
+}
+{
+ICLASS    : VBROADCASTSD
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX
+PATTERN : VV1 0x19  norexw_prefix VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:q:f64 EMX_BROADCAST_1TO4_64
+}
+
+{
+ICLASS    : VBROADCASTF128
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX
+COMMENT : There is no F128 type. I just set these to f64 for lack of anything better.
+PATTERN : VV1 0x1A norexw_prefix VL256 V66 V0F38 NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:dq:f64 EMX_BROADCAST_2TO4_64
+}
+
+
+{
+ICLASS    : VINSERTF128
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x18  norexw_prefix VL256 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:dq:f64 IMM0:r:b EMX_BROADCAST_2TO4_64
+
+PATTERN : VV1 0x18  norexw_prefix  VL256 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=XMM_B():r:dq:f64 IMM0:r:b EMX_BROADCAST_2TO4_64
+}
+
+{
+ICLASS    : VINSERTPS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x21  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+
+PATTERN : VV1 0x21  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 IMM0:r:b
+}
+
+
+
+
+
+{
+ICLASS    : VLDDQU
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF0  VL128 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:dq
+
+PATTERN : VV1 0xF0  VL256 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq MEM0:r:qq
+}
+
+
+
+
+
+
+{
+ICLASS    : VMASKMOVPS
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : maskop NONTEMPORAL
+# load  forms
+PATTERN : VV1 0x2C V66 VL128 V0F38 norexw_prefix  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32   REG1=XMM_N():r:dq MEM0:r:dq:f32
+
+PATTERN : VV1 0x2C V66 VL256 V0F38    norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32   REG1=YMM_N():r:qq MEM0:r:qq:f32
+
+# store forms
+PATTERN : VV1 0x2E V66 V0F38 VL128  norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f32  REG0=XMM_N():r:dq   REG1=XMM_R():r:dq:f32
+
+PATTERN : VV1 0x2E V66 V0F38 VL256 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f32   REG0=YMM_N():r:qq  REG1=YMM_R():r:qq:f32
+}
+
+{
+ICLASS    : VMASKMOVPD
+EXCEPTIONS: avx-type-6
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : maskop
+# load forms
+PATTERN : VV1 0x2D  V66 VL128 V0F38  norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64   REG1=XMM_N():r:dq:u64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x2D  V66 VL256 V0F38 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64   REG1=YMM_N():r:qq:u64 MEM0:r:qq:f64
+
+# store forms
+PATTERN : VV1 0x2F   V66 V0F38 VL128 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f64  REG0=XMM_N():r:dq:u64  REG1=XMM_R():r:dq:f64
+
+PATTERN : VV1 0x2F   V66 V0F38 VL256 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f64  REG0=YMM_N():r:qq:u64   REG1=YMM_R():r:qq:f64
+}
+
+{
+ICLASS    : VPTEST
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX
+FLAGS     : MUST [ zf-mod cf-mod ]
+PATTERN : VV1 0x17  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x17  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq REG1=XMM_B():r:dq
+
+PATTERN : VV1 0x17  VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x17  VL256 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():r:qq REG1=YMM_B():r:qq
+}
+
+{
+ICLASS    : VTESTPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+FLAGS     : MUST [ zf-mod cf-mod ]
+PATTERN : VV1 0x0E VL128 V66 V0F38 norexw_prefix  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x0E  VL128 V66 V0F38 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq:f32 REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x0E VL256 V66 V0F38  norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x0E VL256 V66 V0F38 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():r:qq:f32 REG1=YMM_B():r:qq:f32
+}
+
+{
+ICLASS    : VTESTPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+FLAGS     : MUST [ zf-mod cf-mod ]
+PATTERN : VV1 0x0F  VL128 V66 V0F38 norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x0F VL128 V66 V0F38 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq:f64 REG1=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x0F VL256 V66 V0F38  norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x0F VL256 V66 V0F38 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():r:qq:f64 REG1=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VMAXPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5F  V66 V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x5F  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x5F  V66 V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x5F  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+{
+ICLASS    : VMAXPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5F  VNP V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5F  VNP V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5F  VNP V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5F  VNP V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VMAXSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5F  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x5F  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VMAXSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5F  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x5F  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VMINPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5D  V66 V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x5D  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x5D  V66 V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x5D  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+{
+ICLASS    : VMINPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES:  MXCSR
+PATTERN : VV1 0x5D  VNP V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5D  VNP V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5D  VNP V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5D  VNP V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VMINSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5D  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x5D  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VMINSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+PATTERN : VV1 0x5D  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x5D  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VMOVAPD
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT
+
+# 128b load
+
+PATTERN : VV1 0x28  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64  MEM0:r:dq:f64
+
+PATTERN : VV1 0x28  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_B():r:dq:f64
+IFORM     : VMOVAPD_XMMdq_XMMdq_28
+
+# 128b store
+
+PATTERN : VV1 0x29  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f64 REG0=XMM_R():r:dq:f64
+
+PATTERN : VV1 0x29  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  :  REG0=XMM_B():w:dq:f64 REG1=XMM_R():r:dq:f64
+IFORM     : VMOVAPD_XMMdq_XMMdq_29
+
+# 256b load
+
+PATTERN : VV1 0x28  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64  MEM0:r:qq:f64
+
+PATTERN : VV1 0x28  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64  REG1=YMM_B():r:qq:f64
+IFORM     : VMOVAPD_YMMqq_YMMqq_28
+
+# 256b store
+
+PATTERN : VV1 0x29  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f64 REG0=YMM_R():r:qq:f64
+
+PATTERN : VV1 0x29  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  :  REG0=YMM_B():w:qq:f64 REG1=YMM_R():r:qq:f64
+IFORM     : VMOVAPD_YMMqq_YMMqq_29
+}
+
+
+
+{
+ICLASS    : VMOVAPS
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT
+
+# 128b load
+
+PATTERN : VV1 0x28  VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:dq:f32
+
+PATTERN : VV1 0x28  VL128 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:dq:f32
+IFORM     : VMOVAPS_XMMdq_XMMdq_28
+# 128b store
+
+PATTERN : VV1 0x29  VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f32 REG0=XMM_R():r:dq:f32
+
+PATTERN : VV1 0x29  VL128 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  :  REG0=XMM_B():w:dq:f32 REG1=XMM_R():r:dq:f32
+IFORM     : VMOVAPS_XMMdq_XMMdq_29
+
+# 256b load
+
+PATTERN : VV1 0x28  VL256 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32  MEM0:r:qq:f32
+
+PATTERN : VV1 0x28  VL256 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=YMM_B():r:qq:f32
+IFORM     : VMOVAPS_YMMqq_YMMqq_28
+
+# 256b store
+
+PATTERN : VV1 0x29  VL256 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f32 REG0=YMM_R():r:qq:f32
+
+PATTERN : VV1 0x29  VL256 VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  :  REG0=YMM_B():w:qq:f32 REG1=YMM_R():r:qq:f32
+IFORM     : VMOVAPS_YMMqq_YMMqq_29
+}
+
+
+
+{
+ICLASS    : VMOVD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+# 32b load
+PATTERN : VV1 0x6E  VL128 V66 V0F not64 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:d
+
+PATTERN : VV1 0x6E  VL128 V66 V0F not64  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=GPR32_B():r:d
+
+# 32b store
+PATTERN : VV1 0x7E  VL128 V66 V0F not64  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:d           REG0=XMM_R():r:d
+
+PATTERN : VV1 0x7E  VL128 V66 V0F not64  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:d
+
+
+
+# 32b load
+PATTERN : VV1 0x6E  VL128 V66 V0F mode64 norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:d
+
+PATTERN : VV1 0x6E  VL128 V66 V0F mode64 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=GPR32_B():r:d
+
+# 32b store
+PATTERN : VV1 0x7E  VL128 V66 V0F mode64 norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:d           REG0=XMM_R():r:d
+
+PATTERN : VV1 0x7E  VL128 V66 V0F mode64 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:d
+
+
+}
+
+{
+ICLASS    : VMOVQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+# 64b load
+PATTERN : VV1 0x6E  VL128 V66 V0F mode64 rexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:q
+IFORM     : VMOVQ_XMMdq_MEMq_6E
+
+PATTERN : VV1 0x6E  VL128 V66 V0F mode64 rexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=GPR64_B():r:q
+
+# 64b store
+PATTERN : VV1 0x7E  VL128 V66 V0F mode64 rexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q           REG0=XMM_R():r:q
+IFORM     : VMOVQ_MEMq_XMMq_7E
+
+PATTERN : VV1 0x7E  VL128 V66 V0F mode64 rexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR64_B():w:q REG1=XMM_R():r:q
+
+
+# 2nd page of MOVQ forms
+PATTERN : VV1 0x7E  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq   MEM0:r:q
+IFORM     : VMOVQ_XMMdq_MEMq_7E
+
+PATTERN : VV1 0x7E  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq   REG1=XMM_B():r:q
+IFORM     : VMOVQ_XMMdq_XMMq_7E
+
+PATTERN : VV1 0xD6  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q   REG0=XMM_R():r:q
+IFORM     : VMOVQ_MEMq_XMMq_D6
+
+PATTERN : VV1 0xD6  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq  REG1=XMM_R():r:q
+IFORM     : VMOVQ_XMMdq_XMMq_D6
+
+}
+
+
+
+
+{
+ICLASS    : VMOVDDUP
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+PATTERN : VV1 0x12  VL128 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64  MEM0:r:q:f64
+
+PATTERN : VV1 0x12  VL128 VF2 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_B():r:dq:f64
+
+
+PATTERN : VV1 0x12  VL256 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64  MEM0:r:qq:f64
+
+PATTERN : VV1 0x12  VL256 VF2 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64  REG1=YMM_B():r:qq:f64
+}
+
+
+
+{
+ICLASS    : VMOVDQA
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT
+
+# LOAD XMM
+
+PATTERN : VV1 0x6F  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:dq
+
+PATTERN : VV1 0x6F  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=XMM_B():r:dq
+IFORM     : VMOVDQA_XMMdq_XMMdq_6F
+
+# STORE XMM
+
+PATTERN : VV1 0x7F  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq REG0=XMM_R():r:dq
+
+PATTERN : VV1 0x7F  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq REG1=XMM_R():r:dq
+IFORM     : VMOVDQA_XMMdq_XMMdq_7F
+
+# LOAD YMM
+
+PATTERN : VV1 0x6F  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq  MEM0:r:qq
+
+PATTERN : VV1 0x6F  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq  REG1=YMM_B():r:qq
+IFORM     : VMOVDQA_YMMqq_YMMqq_6F
+
+
+# STORE YMM
+
+PATTERN : VV1 0x7F  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq REG0=YMM_R():r:qq
+
+PATTERN : VV1 0x7F  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_B():w:qq REG1=YMM_R():r:qq
+IFORM     : VMOVDQA_YMMqq_YMMqq_7F
+}
+
+
+{
+ICLASS    : VMOVDQU
+EXCEPTIONS: avx-type-4M
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+# LOAD XMM
+
+PATTERN : VV1 0x6F  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:dq
+
+PATTERN : VV1 0x6F  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=XMM_B():r:dq
+IFORM     : VMOVDQU_XMMdq_XMMdq_6F
+
+# LOAD YMM
+
+PATTERN : VV1 0x6F  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq  MEM0:r:qq
+
+PATTERN : VV1 0x6F  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq  REG1=YMM_B():r:qq
+IFORM     : VMOVDQU_YMMqq_YMMqq_6F
+
+# STORE XMM
+
+PATTERN : VV1 0x7F  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq REG0=XMM_R():r:dq
+
+PATTERN : VV1 0x7F  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq REG1=XMM_R():r:dq
+IFORM     : VMOVDQU_XMMdq_XMMdq_7F
+
+# STORE YMM
+
+PATTERN : VV1 0x7F  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq REG0=YMM_R():r:qq
+
+PATTERN : VV1 0x7F  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_B():w:qq REG1=YMM_R():r:qq
+IFORM     : VMOVDQU_YMMqq_YMMqq_7F
+}
+
+#################################################
+## skipping to the end
+#################################################
+
+#################################################
+## MACROS
+#################################################
+{
+ICLASS    : VMOVSHDUP
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x16  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x16  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x16  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x16  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VMOVSLDUP
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x12  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x12  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x12  VL256 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x12  VL256 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VPOR
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX
+PATTERN : VV1 0xEB  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 MEM0:r:dq:u128
+
+PATTERN : VV1 0xEB  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 REG2=XMM_B():r:dq:u128
+}
+{
+ICLASS    : VPAND
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX
+PATTERN : VV1 0xDB  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 MEM0:r:dq:u128
+
+PATTERN : VV1 0xDB  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 REG2=XMM_B():r:dq:u128
+}
+{
+ICLASS    : VPANDN
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX
+PATTERN : VV1 0xDF  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 MEM0:r:dq:u128
+
+PATTERN : VV1 0xDF  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 REG2=XMM_B():r:dq:u128
+}
+{
+ICLASS    : VPXOR
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX
+PATTERN : VV1 0xEF  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 MEM0:r:dq:u128
+
+PATTERN : VV1 0xEF  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u128 REG1=XMM_N():r:dq:u128 REG2=XMM_B():r:dq:u128
+}
+
+
+{
+ICLASS    : VPABSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x1C   V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x1C  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8  REG1=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPABSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x1D   V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x1D  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16  REG1=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPABSD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x1E   V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x1E  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32  REG1=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPHMINPOSUW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x41   V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x41  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16  REG1=XMM_B():r:dq:u16
+}
+
+
+
+
+
+
+
+
+
+
+{
+ICLASS    : VPSHUFD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x70  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq MEM0:r:dq  IMM0:r:b
+
+PATTERN : VV1 0x70  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_B():r:dq IMM0:r:b
+}
+{
+ICLASS    : VPSHUFHW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x70  VL128 VF3 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq MEM0:r:dq  IMM0:r:b
+
+PATTERN : VV1 0x70  VL128 VF3 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_B():r:dq IMM0:r:b
+}
+{
+ICLASS    : VPSHUFLW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x70  VL128 VF2 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq MEM0:r:dq  IMM0:r:b
+
+PATTERN : VV1 0x70  VL128 VF2 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_B():r:dq IMM0:r:b
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+{
+ICLASS    : VPACKSSWB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x63  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x63  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPACKSSDW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x6B  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x6B  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPACKUSWB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x67  V66 V0F VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x67  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPACKUSDW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x2B  V66 V0F38 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x2B  V66 V0F38 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPSLLW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF1  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF1  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u64
+}
+{
+ICLASS    : VPSLLD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF2  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF2  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u64
+}
+{
+ICLASS    : VPSLLQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF3  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF3  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+}
+
+{
+ICLASS    : VPSRLW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD1  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD1  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u64
+}
+{
+ICLASS    : VPSRLD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD2  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD2  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u64
+}
+{
+ICLASS    : VPSRLQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD3  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD3  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+}
+
+{
+ICLASS    : VPSRAW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE1  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xE1  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:u64
+}
+{
+ICLASS    : VPSRAD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE2  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xE2  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:u64
+}
+
+{
+ICLASS    : VPADDB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xFC  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0xFC  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPADDW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xFD  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xFD  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPADDD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xFE  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0xFE  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPADDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD4  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i64 MEM0:r:dq:i64
+
+PATTERN : VV1 0xD4  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i64 REG2=XMM_B():r:dq:i64
+}
+
+{
+ICLASS    : VPADDSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xEC  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0xEC  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPADDSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xED  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xED  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+
+{
+ICLASS    : VPADDUSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xDC  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xDC  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPADDUSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xDD  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0xDD  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+
+{
+ICLASS    : VPAVGB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE0  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xE0  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPAVGW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE3  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0xE3  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+
+{
+ICLASS    : VPCMPEQB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x74  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0x74  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPCMPEQW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x75  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x75  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPCMPEQD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x76  V66 V0F VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x76  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+{
+ICLASS    : VPCMPEQQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x29  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x29  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+}
+
+{
+ICLASS    : VPCMPGTB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x64  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x64  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPCMPGTW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x65  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x65  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPCMPGTD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x66  V66 V0F VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x66  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPCMPGTQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x37  V66 V0F38 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i64 MEM0:r:dq:i64
+
+PATTERN : VV1 0x37  V66 V0F38 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i64 REG2=XMM_B():r:dq:i64
+}
+
+{
+ICLASS    : VPHADDW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x01  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x01  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPHADDD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x02  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x02  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPHADDSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x03  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x03  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPHSUBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x05  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x05  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPHSUBD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x06  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x06  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPHSUBSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x07  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x07  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+
+{
+ICLASS    : VPMULHUW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE4  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0xE4  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPMULHRSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0B  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x0B  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMULHW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE5  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xE5  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMULLW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD5  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xD5  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMULLD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x40  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x40  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPMULUDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF4  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0xF4  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+{
+ICLASS    : VPMULDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x28  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x28  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPSADBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF6  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xF6  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPSHUFB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x00  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0x00  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+
+{
+ICLASS    : VPSIGNB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x08  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x08  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPSIGNW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x09  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0x09  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPSIGND
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0A  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x0A  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPSUBSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE8  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0xE8  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPSUBSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xE9  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xE9  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+
+{
+ICLASS    : VPSUBUSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD8  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xD8  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPSUBUSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD9  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0xD9  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+
+{
+ICLASS    : VPSUBB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF8  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0xF8  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPSUBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF9  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xF9  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPSUBD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xFA  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0xFA  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+{
+ICLASS    : VPSUBQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xFB  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i64 MEM0:r:dq:i64
+
+PATTERN : VV1 0xFB  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64 REG1=XMM_N():r:dq:i64 REG2=XMM_B():r:dq:i64
+}
+
+{
+ICLASS    : VPUNPCKHBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x68  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0x68  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPUNPCKHWD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x69  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x69  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPUNPCKHDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x6A  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x6A  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+{
+ICLASS    : VPUNPCKHQDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x6D  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x6D  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+}
+
+{
+ICLASS    : VPUNPCKLBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x60  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0x60  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPUNPCKLWD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x61  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x61  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPUNPCKLDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x62  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x62  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+{
+ICLASS    : VPUNPCKLQDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x6C  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x6C  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+}
+
+
+
+{
+ICLASS    : VPSRLDQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x73  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b011] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u128 REG1=XMM_B():r:dq:u128 IMM0:r:b   # NDD
+}
+{
+ICLASS    : VPSLLDQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x73  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b111] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u128 REG1=XMM_B():r:dq:u128 IMM0:r:b   # NDD
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+{
+ICLASS    : VMOVLHPS
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x16  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:q:f32 REG2=XMM_B():r:q:f32
+}
+{
+ICLASS    : VMOVHLPS
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x12  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+}
+
+
+
+
+
+
+
+{
+ICLASS    : VPALIGNR
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0F  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+
+PATTERN : VV1 0x0F  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8 IMM0:r:b
+}
+{
+ICLASS    : VPBLENDW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x0E  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16 IMM0:r:b
+
+PATTERN : VV1 0x0E  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16 IMM0:r:b
+}
+
+
+
+
+
+
+
+
+
+
+
+
+############################################################
+{
+ICLASS    : VROUNDPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x09  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64  MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x09  VL128 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_B():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0x09  VL256 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64  MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0x09  VL256 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_B():r:qq:f64 IMM0:r:b
+}
+{
+ICLASS    : VROUNDPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x08  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x08  VL128 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0x08  VL256 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32  MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0x08  VL256 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_B():r:qq:f32 IMM0:r:b
+}
+{
+ICLASS    : VROUNDSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR simd_scalar
+PATTERN : VV1 0x0B  V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_N():r:dq:f64  MEM0:r:q:f64         IMM0:r:b
+
+PATTERN : VV1 0x0B  V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_N():r:dq:f64  REG2=XMM_B():r:q:f64 IMM0:r:b
+}
+{
+ICLASS    : VROUNDSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR simd_scalar
+PATTERN : VV1 0x0A  V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_N():r:dq:f32  MEM0:r:d:f32         IMM0:r:b
+
+PATTERN : VV1 0x0A  V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_N():r:dq:f32  REG2=XMM_B():r:d:f32 IMM0:r:b
+}
+
+{
+ICLASS    : VSHUFPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xC6  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 IMM0:r:b
+}
+{
+ICLASS    : VSHUFPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xC6  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 IMM0:r:b
+
+PATTERN : VV1 0xC6  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 IMM0:r:b
+}
+
+{
+ICLASS    : VRCPPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x53  VNP VL128 NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:dq:f32
+
+PATTERN : VV1 0x53  VNP VL128 NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x53  VNP VL256 NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32  MEM0:r:qq:f32
+
+PATTERN : VV1 0x53  VNP VL256 NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VRCPSS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: simd_scalar
+PATTERN : VV1 0x53  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x53  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VRSQRTPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x52  VNP VL128 NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:dq:f32
+
+PATTERN : VV1 0x52  VNP VL128 NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x52  VNP VL256 NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32  MEM0:r:qq:f32
+
+PATTERN : VV1 0x52  VNP VL256 NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VRSQRTSS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: simd_scalar
+PATTERN : VV1 0x52  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x52  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VSQRTPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x51  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x51  VL128 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x51  VL256 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64  MEM0:r:qq:f64
+
+PATTERN : VV1 0x51  VL256 V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64  REG1=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VSQRTPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x51  VL128 VNP NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:dq:f32
+
+PATTERN : VV1 0x51  VL128 VNP NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x51  VL256 VNP NOVSR V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32  MEM0:r:qq:f32
+
+PATTERN : VV1 0x51  VL256 VNP NOVSR V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VSQRTSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : MXCSR simd_scalar
+PATTERN : VV1 0x51  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x51  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VSQRTSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR simd_scalar
+PATTERN : VV1 0x51  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x51  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+
+{
+ICLASS    : VUNPCKHPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x15  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x15  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x15  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x15  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VUNPCKHPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x15  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x15  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x15  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x15  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+{
+ICLASS    : VSUBPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x5C  V66 V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x5C  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x5C  V66 V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x5C  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VSUBPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x5C  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x5C  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x5C  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x5C  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VSUBSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : MXCSR SIMD_SCALAR
+PATTERN : VV1 0x5C  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x5C  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VSUBSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR simd_scalar
+PATTERN : VV1 0x5C  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x5C  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VMULPD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x59  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x59  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x59  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x59  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VMULPS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN : VV1 0x59  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x59  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x59  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x59  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VMULSD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : MXCSR simd_scalar
+PATTERN : VV1 0x59  VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:q:f64
+
+PATTERN : VV1 0x59  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VMULSS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR simd_scalar
+PATTERN : VV1 0x59  VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:d:f32
+
+PATTERN : VV1 0x59  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VORPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x56  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x56  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+
+PATTERN : VV1 0x56  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x56  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+{
+ICLASS    : VORPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x56  VNP V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x56  VNP V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+
+PATTERN : VV1 0x56  VNP V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x56  VNP V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+
+{
+ICLASS    : VPMAXSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3C  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x3C  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPMAXSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xEE  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xEE  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMAXSD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3D  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x3D  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPMAXUB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xDE  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xDE  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPMAXUW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3E  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x3E  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPMAXUD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3F  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x3F  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+
+{
+ICLASS    : VPMINSB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x38  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x38  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8
+}
+{
+ICLASS    : VPMINSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xEA  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xEA  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMINSD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x39  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 MEM0:r:dq:i32
+
+PATTERN : VV1 0x39  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i32 REG2=XMM_B():r:dq:i32
+}
+
+{
+ICLASS    : VPMINUB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xDA  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+
+PATTERN : VV1 0xDA  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+}
+{
+ICLASS    : VPMINUW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3A  V66 V0F38 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 MEM0:r:dq:u16
+
+PATTERN : VV1 0x3A  V66 V0F38 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u16 REG2=XMM_B():r:dq:u16
+}
+{
+ICLASS    : VPMINUD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x3B  V66 V0F38 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 MEM0:r:dq:u32
+
+PATTERN : VV1 0x3B  V66 V0F38 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32 REG1=XMM_N():r:dq:u32 REG2=XMM_B():r:dq:u32
+}
+
+
+{
+ICLASS    : VPMADDWD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xF5  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i16 MEM0:r:dq:i16
+
+PATTERN : VV1 0xF5  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32 REG1=XMM_N():r:dq:i16 REG2=XMM_B():r:dq:i16
+}
+{
+ICLASS    : VPMADDUBSW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x04  VL128 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:u8 MEM0:r:dq:i8
+
+PATTERN : VV1 0x04  VL128 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:i8
+}
+
+
+{
+ICLASS    : VMPSADBW
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x42  VL128 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+
+PATTERN : VV1 0x42  VL128 V66 V0F3A MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8 IMM0:r:b
+}
+
+
+############################################################
+{
+ICLASS    : VPSLLW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x71  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u16 REG1=XMM_B():r:dq:u16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSLLD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x72  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u32 REG1=XMM_B():r:dq:u32 IMM0:r:b  #NDD
+}
+{
+ICLASS    : VPSLLQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x73  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u64 REG1=XMM_B():r:dq:u64 IMM0:r:b # NDD
+}
+
+{
+ICLASS    : VPSRAW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x71  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b100] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:i16 REG1=XMM_B():r:dq:i16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRAD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x72  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b100] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:i32 REG1=XMM_B():r:dq:i32 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLW
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x71  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u16 REG1=XMM_B():r:dq:u16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x72  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u32 REG1=XMM_B():r:dq:u32 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLQ
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x73  VL128 V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_N():w:dq:u64 REG1=XMM_B():r:dq:u64 IMM0:r:b  # NDD
+}
+
+
+{
+ICLASS    : VUCOMISD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+FLAGS     : MUST [ zf-mod pf-mod cf-mod of-0 af-0 sf-0 ]
+
+PATTERN : VV1 0x2E V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:dq:f64  MEM0:r:q:f64
+
+PATTERN : VV1 0x2E V66 V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq:f64  REG1=XMM_B():r:q:f64
+}
+
+{
+ICLASS    : VUCOMISS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar MXCSR
+
+FLAGS     : MUST [ zf-mod pf-mod cf-mod of-0 af-0 sf-0 ]
+
+PATTERN : VV1 0x2E VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():r:dq:f32  MEM0:r:d:f32
+
+PATTERN : VV1 0x2E VNP V0F NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq:f32  REG1=XMM_B():r:d:f32
+}
+
+###############################################
+
+
+{
+ICLASS    : VUNPCKLPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x14  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+
+PATTERN : VV1 0x14  VL128 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+PATTERN : VV1 0x14  VL256 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+
+PATTERN : VV1 0x14  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+
+
+{
+ICLASS    : VUNPCKLPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x14  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+
+PATTERN : VV1 0x14  VL128 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+PATTERN : VV1 0x14  VL256 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+
+PATTERN : VV1 0x14  VL256 VNP V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+
+
+
+
+{
+ICLASS    : VXORPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x57  V66 V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0x57  V66 V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64 REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64
+
+PATTERN : VV1 0x57  V66 V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x57  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+
+{
+ICLASS    : VXORPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : LOGICAL_FP
+EXTENSION : AVX
+PATTERN : VV1 0x57  VNP V0F VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x57  VNP V0F VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x57  VNP V0F VL256 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x57  VNP V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+}
+
+
+############################################################################
+
+{
+ICLASS    : VMOVSS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar
+
+# NOTE: REG1 is ignored!!!
+PATTERN : VV1 0x10  VF3 V0F MOD[mm] MOD!=3  NOVSR REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32  MEM0:r:d:f32
+
+PATTERN   : VV1 0x10  VF3 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_N():r:dq:f32    REG2=XMM_B():r:d:f32
+IFORM     : VMOVSS_XMMdq_XMMdq_XMMd_10
+
+PATTERN : VV1 0x11  VF3 V0F  MOD[mm] MOD!=3 NOVSR  REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:d:f32          REG0=XMM_R():r:d:f32
+
+PATTERN : VV1 0x11  VF3 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq:f32   REG1=XMM_N():r:dq:f32   REG2=XMM_R():r:d:f32
+IFORM     : VMOVSS_XMMdq_XMMdq_XMMd_11
+}
+############################################################################
+{
+ICLASS    : VMOVSD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES : simd_scalar
+
+# NOTE: REG1 is ignored!!!
+PATTERN : VV1 0x10  VF2 V0F MOD[mm] MOD!=3  NOVSR REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64   MEM0:r:q:f64
+
+PATTERN : VV1 0x10  VF2 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64  REG1=XMM_N():r:dq:f64    REG2=XMM_B():r:q:f64
+IFORM     : VMOVSD_XMMdq_XMMdq_XMMq_10
+
+PATTERN : VV1 0x11  VF2 V0F MOD[mm] MOD!=3 NOVSR REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q:f64           REG0=XMM_R():r:q:f64
+
+PATTERN : VV1 0x11  VF2 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq:f64   REG1=XMM_N():r:dq:f64  REG2=XMM_R():r:q:f64
+IFORM     : VMOVSD_XMMdq_XMMdq_XMMq_11
+}
+############################################################################
+{
+ICLASS    : VMOVUPD
+EXCEPTIONS: avx-type-4M
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+PATTERN : VV1 0x10  V66 VL128 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64   MEM0:r:dq:f64
+
+PATTERN : VV1 0x10  V66 VL128 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f64   REG1=XMM_B():r:dq:f64
+IFORM     : VMOVUPD_XMMdq_XMMdq_10
+
+PATTERN : VV1 0x11  V66 VL128 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f64           REG0=XMM_R():r:dq:f64
+
+PATTERN : VV1 0x11  V66 VL128 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq:f64   REG1=XMM_R():r:dq:f64
+IFORM     : VMOVUPD_XMMdq_XMMdq_11
+
+# 256b versions
+
+PATTERN : VV1 0x10  V66 VL256 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f64      MEM0:r:qq:f64
+
+PATTERN : VV1 0x10  V66 VL256 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64      REG1=YMM_B():r:qq:f64
+IFORM     : VMOVUPD_YMMqq_YMMqq_10
+
+PATTERN : VV1 0x11  V66 VL256 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f64              REG0=YMM_R():r:qq:f64
+
+PATTERN : VV1 0x11  V66 VL256 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_B():w:qq:f64      REG1=YMM_R():r:qq:f64
+IFORM     : VMOVUPD_YMMqq_YMMqq_11
+}
+
+############################################################################
+{
+ICLASS    : VMOVUPS
+EXCEPTIONS: avx-type-4M
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+PATTERN : VV1 0x10  VNP VL128 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32   MEM0:r:dq:f32
+
+PATTERN : VV1 0x10  VNP VL128 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32   REG1=XMM_B():r:dq:f32
+IFORM     : VMOVUPS_XMMdq_XMMdq_10
+
+PATTERN : VV1 0x11  VNP VL128 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:dq:f32           REG0=XMM_R():r:dq:f32
+
+PATTERN : VV1 0x11  VNP VL128 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_B():w:dq:f32   REG1=XMM_R():r:dq:f32
+IFORM     : VMOVUPS_XMMdq_XMMdq_11
+
+# 256b versions
+
+PATTERN : VV1 0x10  VNP VL256 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:f32      MEM0:r:qq:f32
+
+PATTERN : VV1 0x10  VNP VL256 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32      REG1=YMM_B():r:qq:f32
+IFORM     : VMOVUPS_YMMqq_YMMqq_10
+
+PATTERN : VV1 0x11  VNP VL256 V0F NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:qq:f32              REG0=YMM_R():r:qq:f32
+
+PATTERN : VV1 0x11  VNP VL256 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_B():w:qq:f32      REG1=YMM_R():r:qq:f32
+IFORM     : VMOVUPS_YMMqq_YMMqq_11
+}
+
+
+############################################################################
+{
+ICLASS    : VMOVLPD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+COMMENT: 3op version uses high part of XMM_N
+PATTERN : VV1 0x12  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64   REG1=XMM_N():r:dq:f64   MEM0:r:q:f64
+
+PATTERN : VV1 0x13  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q:f64            REG0=XMM_R():r:q:f64
+}
+
+{
+ICLASS    : VMOVLPS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+COMMENT: 3op version uses high part of XMM_N
+PATTERN : VV1 0x12  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32   REG1=XMM_N():r:dq:f32   MEM0:r:q:f32
+
+PATTERN : VV1 0x13  VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q:f32            REG0=XMM_R():r:q:f32
+}
+
+{
+ICLASS    : VMOVHPD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+COMMENT:  3op form uses low bits of REG1, 2op form uses high bits of REG0
+PATTERN : VV1 0x16  VL128 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f64   REG1=XMM_N():r:q:f64   MEM0:r:q:f64
+
+PATTERN : VV1 0x17  VL128 V66 V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q:f64            REG0=XMM_R():r:dq:f64
+}
+
+{
+ICLASS    : VMOVHPS
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+
+COMMENT:  3op form uses low bits of REG1, 2op form uses high bits of REG0
+PATTERN : VV1 0x16  VL128 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:f32   REG1=XMM_N():r:q:f32   MEM0:r:q:f32
+
+PATTERN : VV1 0x17  VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : MEM0:w:q:f32            REG0=XMM_R():r:dq:f32
+}
+############################################################################
+
+{
+ICLASS    : VMOVMSKPD
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x50  VL128 V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d   REG1=XMM_B():r:dq:f64
+
+# 256b versions
+
+PATTERN : VV1 0x50  VL256 V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d   REG1=YMM_B():r:qq:f64
+}
+
+{
+ICLASS    : VMOVMSKPS
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+PATTERN : VV1 0x50  VL128 VNP V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d   REG1=XMM_B():r:dq:f32
+
+# 256b versions
+
+PATTERN : VV1 0x50  VL256 VNP V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d   REG1=YMM_B():r:qq:f32
+}
+
+############################################################################
+{
+ICLASS    : VPMOVMSKB
+EXCEPTIONS: avx-type-7
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0xD7  VL128 V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:u32   REG1=XMM_B():r:dq:i8
+}
+
+############################################################################
+
+############################################################################
+# SX versions
+############################################################################
+
+{
+ICLASS    : VPMOVSXBW
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x20  VL128 V66 V0F38 NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i16   REG1=XMM_B():r:q:i8
+PATTERN : VV1 0x20  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i16  MEM0:r:q:i8
+}
+
+############################################################################
+{
+ICLASS    : VPMOVSXBD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x21  VL128 V66 V0F38 NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32   REG1=XMM_B():r:d:i8
+PATTERN : VV1 0x21  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32   MEM0:r:d:i8
+}
+############################################################################
+{
+ICLASS    : VPMOVSXBQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x22  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64   REG1=XMM_B():r:w:i8
+PATTERN : VV1 0x22  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64   MEM0:r:w:i8
+}
+############################################################################
+{
+ICLASS    : VPMOVSXWD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x23  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i32   REG1=XMM_B():r:q:i16
+PATTERN : VV1 0x23  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i32   MEM0:r:q:i16
+}
+############################################################################
+{
+ICLASS    : VPMOVSXWQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x24  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64   REG1=XMM_B():r:d:i16
+PATTERN : VV1 0x24  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64   MEM0:r:d:i16
+}
+############################################################################
+{
+ICLASS    : VPMOVSXDQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x25  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:i64   REG1=XMM_B():r:q:i32
+PATTERN : VV1 0x25  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:i64   MEM0:r:q:i32
+}
+
+
+
+
+
+############################################################################
+# ZX versions
+############################################################################
+
+{
+ICLASS    : VPMOVZXBW
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x30  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16   REG1=XMM_B():r:q:u8
+PATTERN : VV1 0x30  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16   MEM0:r:q:u8
+}
+
+############################################################################
+{
+ICLASS    : VPMOVZXBD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x31  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32   REG1=XMM_B():r:d:u8
+PATTERN : VV1 0x31  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32   MEM0:r:d:u8
+}
+############################################################################
+{
+ICLASS    : VPMOVZXBQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x32  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64   REG1=XMM_B():r:w:u8
+PATTERN : VV1 0x32  V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64   MEM0:r:w:u8
+}
+############################################################################
+{
+ICLASS    : VPMOVZXWD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x33  V66 V0F38 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32   REG1=XMM_B():r:q:u16
+PATTERN : VV1 0x33  V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32   MEM0:r:q:u16
+}
+############################################################################
+{
+ICLASS    : VPMOVZXWQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x34  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64   REG1=XMM_B():r:d:u16
+PATTERN : VV1 0x34  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64   MEM0:r:d:u16
+}
+############################################################################
+{
+ICLASS    : VPMOVZXDQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x35  VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64   REG1=XMM_B():r:q:u32
+PATTERN : VV1 0x35  VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64   MEM0:r:q:u32
+}
+
+
+
+############################################################################
+############################################################################
+{
+ICLASS    : VPEXTRB
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT: WIG
+PATTERN : VV1 0x14  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:b           REG0=XMM_R():r:dq:u8 IMM0:r:b
+
+PATTERN : VV1 0x14  VL128 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:dq:u8 IMM0:r:b
+}
+############################################################################
+{
+ICLASS    : VPEXTRW
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT: WIG
+
+PATTERN : VV1 0x15  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:w           REG0=XMM_R():r:dq:u16 IMM0:r:b
+
+PATTERN : VV1 0x15  VL128 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:dq:u16 IMM0:r:b
+IFORM     : VPEXTRW_GPR32d_XMMdq_IMMb_15
+
+# special C5 reg-only versions from SSE2:
+
+PATTERN   : VV1 0xC5  VL128 V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_R():w:d    REG1=XMM_B():r:dq:u16 IMM0:r:b
+IFORM     : VPEXTRW_GPR32d_XMMdq_IMMb_C5
+}
+############################################################################
+{
+ICLASS    : VPEXTRQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x16  VL128 V66 V0F3A mode64 rexw_prefix  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:q              REG0=XMM_R():r:dq:u64 IMM0:r:b
+PATTERN : VV1 0x16  VL128 V66 V0F3A mode64 rexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR64_B():w:q    REG1=XMM_R():r:dq:u64 IMM0:r:b
+}
+############################################################################
+{
+ICLASS    : VPEXTRD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT   : SNB had an errata where it would #UD if VEX.W=1 outside of 64b mode.  Not modeled.
+
+# 64b mode
+PATTERN   : VV1 0x16 VL128 V66 V0F3A mode64 norexw_prefix NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:d REG0=XMM_R():r:dq:u32 IMM0:r:b
+PATTERN   : VV1 0x16 VL128 V66 V0F3A mode64 norexw_prefix NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:dq:u32 IMM0:r:b
+
+# not64b mode
+PATTERN   : VV1 0x16 VL128 V66 V0F3A not64  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:d REG0=XMM_R():r:dq:u32 IMM0:r:b
+PATTERN   : VV1 0x16 VL128 V66 V0F3A not64  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=GPR32_B():w:d REG1=XMM_R():r:dq:u32 IMM0:r:b
+
+}
+############################################################################
+
+
+
+
+
+
+{
+ICLASS    : VPINSRB
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT: WIG
+PATTERN : VV1 0x20  VL128 V66 V0F3A  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u8     REG1=XMM_N():r:dq:u8  MEM0:r:b:u8            IMM0:r:b
+PATTERN : VV1 0x20  VL128 V66 V0F3A  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u8     REG1=XMM_N():r:dq:u8  REG2=GPR32_B():r:d:u8  IMM0:r:b
+}
+
+{
+ICLASS    : VPINSRW
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT : WIG
+PATTERN : VV1 0xC4  VL128 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16     REG1=XMM_N():r:dq:u16  MEM0:r:w:u16           IMM0:r:b
+
+PATTERN : VV1 0xC4  VL128 V66 V0F  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u16     REG1=XMM_N():r:dq:u16  REG2=GPR32_B():r:d:u16  IMM0:r:b
+}
+
+{
+ICLASS    : VPINSRD
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+COMMENT   : SNB had an errata where it would #UD if VEX.W=1 outside of 64b mode. Not modeled
+# 64b mode
+PATTERN : VV1 0x22  VL128 V66 V0F3A mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32     REG1=XMM_N():r:dq:u32  MEM0:r:d:u32            IMM0:r:b
+PATTERN : VV1 0x22  VL128 V66 V0F3A mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32     REG1=XMM_N():r:dq:u32  REG2=GPR32_B():r:d:u32  IMM0:r:b
+
+# 32b mode
+PATTERN : VV1 0x22  VL128 V66 V0F3A not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32     REG1=XMM_N():r:dq:u32  MEM0:r:d:u32            IMM0:r:b
+PATTERN : VV1 0x22  VL128 V66 V0F3A not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32     REG1=XMM_N():r:dq:u32  REG2=GPR32_B():r:d:u32  IMM0:r:b
+}
+{
+ICLASS    : VPINSRQ
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x22  VL128 V66 V0F3A mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u64     REG1=XMM_N():r:dq:u64  MEM0:r:q:u64            IMM0:r:b
+PATTERN : VV1 0x22  VL128 V66 V0F3A mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u64     REG1=XMM_N():r:dq:u64  REG2=GPR64_B():r:q:u64  IMM0:r:b
+}
+
+############################################################################
+
+
+
+
+
+{
+ICLASS    : VPCMPESTRI
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : STTNI
+EXTENSION : AVX
+FLAGS     : MUST [ cf-mod zf-mod sf-mod of-mod af-0 pf-0 ]
+
+# outside of 64b mode, vex.w is ignored for this instr
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_EDX:r:SUPP REG3=XED_REG_ECX:w:SUPP
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_EAX:r:SUPP REG3=XED_REG_EDX:r:SUPP REG4=XED_REG_ECX:w:SUPP
+
+# in 64b mode, vex.w changes the behavior for GPRs
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_EDX:r:SUPP REG3=XED_REG_ECX:w:SUPP
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_EAX:r:SUPP REG3=XED_REG_EDX:r:SUPP REG4=XED_REG_ECX:w:SUPP
+
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_RAX:r:SUPP REG2=XED_REG_RDX:r:SUPP REG3=XED_REG_RCX:w:SUPP
+PATTERN : VV1 0x61  VL128 V66 V0F3A NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_RAX:r:SUPP REG3=XED_REG_RDX:r:SUPP REG4=XED_REG_RCX:w:SUPP
+}
+{
+ICLASS    : VPCMPISTRI
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : STTNI
+EXTENSION : AVX
+FLAGS     : MUST [ cf-mod zf-mod sf-mod of-mod af-0 pf-0 ]
+
+# outside of 64b mode, vex.w is ignored for this instr
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR  not64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_ECX:w:SUPP
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR  not64  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_ECX:w:SUPP
+
+# in 64b mode, vex.w changes the behavior for GPRs
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR mode64 norexw_prefix  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_ECX:w:SUPP
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_ECX:w:SUPP
+
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_RCX:w:SUPP
+PATTERN : VV1 0x63  VL128 V66 V0F3A NOVSR mode64 rexw_prefix  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_RCX:w:SUPP
+}
+
+{
+ICLASS    : VPCMPESTRM
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : STTNI
+EXTENSION : AVX
+FLAGS     : MUST [ cf-mod zf-mod sf-mod of-mod af-0 pf-0 ]
+
+# outside of 64b mode, vex.w is ignored for this instr
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR not64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_EDX:r:SUPP REG3=XED_REG_XMM0:w:dq:SUPP
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR not64 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_EAX:r:SUPP REG3=XED_REG_EDX:r:SUPP REG4=XED_REG_XMM0:w:dq:SUPP
+
+# in 64b mode, vex.w changes the behavior for GPRs
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR mode64 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_EDX:r:SUPP REG3=XED_REG_XMM0:w:dq:SUPP
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR mode64 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_EAX:r:SUPP REG3=XED_REG_EDX:r:SUPP REG4=XED_REG_XMM0:w:dq:SUPP
+
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR mode64 rexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_RAX:r:SUPP REG2=XED_REG_RDX:r:SUPP REG3=XED_REG_XMM0:w:dq:SUPP
+PATTERN : VV1 0x60  VL128 V66 V0F3A NOVSR mode64 rexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_RAX:r:SUPP REG3=XED_REG_RDX:r:SUPP REG4=XED_REG_XMM0:w:dq:SUPP
+}
+
+{
+ICLASS    : VPCMPISTRM
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : STTNI
+EXTENSION : AVX
+FLAGS     : MUST [ cf-mod zf-mod sf-mod of-mod af-0 pf-0 ]
+PATTERN : VV1 0x62  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     MEM0:r:dq         IMM0:r:b REG1=XED_REG_XMM0:w:dq:SUPP
+PATTERN : VV1 0x62  VL128 V66 V0F3A NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():r:dq     REG1=XMM_B():r:dq IMM0:r:b REG2=XED_REG_XMM0:w:dq:SUPP
+}
+####################################################################################
+
+
+
+####################################################################################
+{
+ICLASS    : VMASKMOVDQU
+EXCEPTIONS: avx-type-4
+CPL       : 3
+
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES : maskop fixed_base0 NOTSX NONTEMPORAL
+PATTERN : VV1 0xF7 V0F V66 VL128  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():r:dq:u8 REG1=XMM_B():r:dq:u8 MEM0:w:SUPP:dq:u8 BASE0=ArDI():r:SUPP SEG0=FINAL_DSEG():r:SUPP
+}
+
+####################################################################################
+{
+ICLASS    : VLDMXCSR
+EXCEPTIONS: avx-type-5L
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR
+PATTERN   : VV1 0xAE VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[0b010] RM[nnn] no_refining_prefix MODRM()
+OPERANDS  : MEM0:r:d REG0=XED_REG_MXCSR:w:SUPP
+}
+{
+ICLASS    : VSTMXCSR
+EXCEPTIONS: avx-type-5
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+ATTRIBUTES: MXCSR_RD
+PATTERN   : VV1 0xAE VL128 VNP V0F NOVSR MOD[mm] MOD!=3 REG[0b011] RM[nnn] no_refining_prefix MODRM()
+OPERANDS  : MEM0:w:d REG0=XED_REG_MXCSR:r:SUPP
+}
+#######################################################################################
+
+{
+ICLASS    : VPBLENDVB
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+
+# W0 (modrm.rm memory op 2nd to last)
+PATTERN : VV1 0x4C   VL128 V66 V0F3A norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 MEM0:r:dq:i8 REG2=XMM_SE():r:dq:i8
+
+PATTERN : VV1 0x4C   VL128 V66 V0F3A norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:i8 REG1=XMM_N():r:dq:i8 REG2=XMM_B():r:dq:i8 REG3=XMM_SE():r:dq:i8
+}
+
+{
+ICLASS    : VBLENDVPD
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+
+# W0 (modrm.rm memory op 2nd to last)
+PATTERN : VV1 0x4B   V66 V0F3A VL128 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64 REG2=XMM_SE():r:dq:u64
+
+PATTERN : VV1 0x4B   V66 V0F3A VL128 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64 REG3=XMM_SE():r:dq:u64
+
+PATTERN : VV1 0x4B   V66 V0F3A VL256 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64 REG2=YMM_SE():r:qq:u64
+
+PATTERN : VV1 0x4B   V66 V0F3A VL256 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64 REG3=YMM_SE():r:qq:u64
+
+}
+
+{
+ICLASS    : VBLENDVPS
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+
+# W0 (modrm.rm memory op 2nd to last)
+PATTERN : VV1 0x4A   V66 V0F3A VL128 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32 REG2=XMM_SE():r:dq:u32
+
+PATTERN : VV1 0x4A   V66 V0F3A VL128 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=XMM_R():w:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32 REG3=XMM_SE():r:dq:u32
+
+PATTERN : VV1 0x4A   V66 V0F3A VL256 norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32 REG2=YMM_SE():r:qq:u32
+
+PATTERN : VV1 0x4A   V66 V0F3A VL256 norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32 REG3=YMM_SE():r:qq:u32
+
+
+}
+
+#######################################################################################
+
+
+
+{
+ICLASS    : VMOVNTDQA
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX  NONTEMPORAL
+
+PATTERN : VV1 0x2A  V66 V0F38 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq MEM0:r:dq
+}
+
+
+
+
+
+{
+ICLASS    : VMOVNTDQ
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0xE7  V66 V0F VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:dq:i32  REG0=XMM_R():r:dq:i32
+
+}
+{
+ICLASS    : VMOVNTPD
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0x2B  V66 V0F VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:dq:f64  REG0=XMM_R():r:dq:f64
+
+}
+{
+ICLASS    : VMOVNTPS
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0x2B  VNP V0F VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:dq:f32  REG0=XMM_R():r:dq:f32
+
+}
+
+
+
+###FILE: ./datafiles/avx/avx-movnt-store.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+
+{
+ICLASS    : VMOVNTDQ
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0xE7  V66 V0F VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:qq:i32  REG0=YMM_R():r:qq:i32
+
+}
+{
+ICLASS    : VMOVNTPD
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0x2B  V66 V0F VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:qq:f64  REG0=YMM_R():r:qq:f64
+
+}
+{
+ICLASS    : VMOVNTPS
+EXCEPTIONS: avx-type-1
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+PATTERN : VV1 0x2B  VNP V0F VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:qq:f32  REG0=YMM_R():r:qq:f32
+
+}
+
+
+
+###FILE: ./datafiles/avx/avx-aes-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+{
+ICLASS    : VAESKEYGENASSIST
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDF VL128 V66 V0F3A  NOVSR MOD[0b11] MOD=3  REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq  REG1=XMM_B():r:dq IMM0:r:b
+PATTERN : VV1 0xDF  VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:dq IMM0:r:b
+}
+{
+ICLASS    : VAESENC
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDC V66 V0F38  MOD[0b11] MOD=3  REG[rrr] RM[nnn] VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  REG2=XMM_B():r:dq
+PATTERN : VV1 0xDC V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  MEM0:r:dq
+}
+{
+ICLASS    : VAESENCLAST
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDD V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn] VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  REG2=XMM_B():r:dq
+PATTERN : VV1 0xDD  V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  MEM0:r:dq
+}
+{
+ICLASS    : VAESDEC
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDE V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn] VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  REG2=XMM_B():r:dq
+PATTERN : VV1 0xDE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  MEM0:r:dq
+}
+{
+ICLASS    : VAESDECLAST
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDF V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn] VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  REG2=XMM_B():r:dq
+PATTERN : VV1 0xDF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() VL128
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq  MEM0:r:dq
+}
+{
+ICLASS    : VAESIMC
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AES
+EXTENSION : AVXAES
+PATTERN : VV1 0xDB VL128 V66 V0F38 NOVSR MOD[0b11] MOD=3  REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq  REG1=XMM_B():r:dq
+PATTERN : VV1 0xDB VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq  MEM0:r:dq
+}
+
+
+
+###FILE: ./datafiles/avx/avx-pclmul-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+{
+ICLASS    : VPCLMULQDQ
+EXCEPTIONS: avx-type-4
+CPL       : 3
+CATEGORY  : AVX
+EXTENSION : AVX
+PATTERN : VV1 0x44  V66 V0F3A  MOD[0b11]  MOD=3  REG[rrr] RM[nnn] VL128 UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u128  REG1=XMM_N():r:dq:u64 REG2=XMM_B():r:dq:u64 IMM0:r:b
+PATTERN : VV1 0x44  V66 V0F3A  MOD[mm]  MOD!=3 REG[rrr] RM[nnn] MODRM() VL128 UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u128  REG1=XMM_N():r:dq:u64 MEM0:r:dq:u64 IMM0:r:b
+}
+
+
+###FILE: ./datafiles/ivbavx/fp16-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+{
+ICLASS    : VCVTPH2PS
+COMMENT   : UPCONVERT -- NO IMMEDIATE
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : F16C
+ATTRIBUTES : MXCSR
+EXCEPTIONS: avx-type-11
+# 128b form
+
+PATTERN : VV1 0x13 VL128 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()  W0
+OPERANDS  : REG0=XMM_R():w:dq:f32 MEM0:r:q:f16
+
+PATTERN : VV1 0x13 VL128 V66 V0F38 NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] W0
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:q:f16
+
+
+# 256b form
+
+PATTERN : VV1 0x13 VL256 V66 V0F38 NOVSR  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() W0
+OPERANDS  : REG0=YMM_R():w:qq:f32 MEM0:r:dq:f16
+
+PATTERN : VV1 0x13 VL256 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]  W0
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=XMM_B():r:dq:f16
+}
+
+
+{
+ICLASS    : VCVTPS2PH
+COMMENT   : DOWNCONVERT -- HAS IMMEDIATE
+CPL       : 3
+CATEGORY  : CONVERT
+EXTENSION : F16C
+ATTRIBUTES : MXCSR
+EXCEPTIONS: avx-type-11
+# 128b imm8 form
+
+PATTERN : VV1 0x1D VL128 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8() W0
+OPERANDS  : MEM0:w:q:f16 REG0=XMM_R():r:dq:f32  IMM0:r:b
+
+PATTERN : VV1 0x1D VL128 V66 V0F3A NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8() W0
+OPERANDS  : REG0=XMM_B():w:q:f16 REG1=XMM_R():r:dq:f32   IMM0:r:b
+
+# 256b imm8 form
+
+PATTERN : VV1 0x1D VL256 V66 V0F3A NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8() W0
+OPERANDS  : MEM0:w:dq:f16 REG0=YMM_R():r:qq:f32  IMM0:r:b
+
+PATTERN : VV1 0x1D VL256 V66 V0F3A NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8() W0
+OPERANDS  : REG0=XMM_B():w:dq:f16 REG1=YMM_R():r:qq:f32    IMM0:r:b
+
+}
+
+
+
+###FILE: ./datafiles/avxhsw/gather-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+
+# DEST in MODRM.REG
+# BASE in SIB.base
+# INDEX in SIB.index
+# MASK in VEX.VVVV   --  NOTE mask is a signed integer!!!
+
+#                    VL = 128                        VL = 256
+#            dest/mask   index  memsz        dest/mask   index   memsz
+# qps/qd      xmm       xmm      2*32=64b      xmm*       ymm*    4*32=128b
+# dps/dd      xmm       xmm      4*32=128b     ymm        ymm     8*32=256b
+# dpd/dq      xmm       xmm      2*64=128b     ymm*       xmm*    4*64=256b
+# qpd/qq      xmm       xmm      2*64=128b     ymm        ymm     4*64=256b
+
+
+
+{
+ICLASS    : VGATHERDPD
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather DWORD_INDICES ELEMENT_SIZE_q SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x92   VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:f64   MEM0:r:q:f64 REG1=YMM_N():rw:qq:i64
+IFORM: VGATHERDPD_YMMf64_MEMf64_YMMi64_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x92   VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:f64   MEM0:r:q:f64 REG1=XMM_N():rw:dq:i64
+IFORM: VGATHERDPD_XMMf64_MEMf64_XMMi64_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VGATHERDPS
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather DWORD_INDICES ELEMENT_SIZE_d SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x92   VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:f32   MEM0:r:d:f32 REG1=YMM_N():rw:qq:i32
+IFORM: VGATHERDPS_YMMf32_MEMf32_YMMi32_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x92   VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:f32   MEM0:r:d:f32 REG1=XMM_N():rw:dq:i32
+IFORM: VGATHERDPS_XMMf32_MEMf32_XMMi32_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VGATHERQPD
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather QWORD_INDICES ELEMENT_SIZE_q SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x93   VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:f64   MEM0:r:q:f64 REG1=YMM_N():rw:qq:i64
+IFORM: VGATHERQPD_YMMf64_MEMf64_YMMi64_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x93   VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:f64   MEM0:r:q:f64 REG1=XMM_N():rw:dq:i64
+IFORM: VGATHERQPD_XMMf64_MEMf64_XMMi64_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VGATHERQPS
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather QWORD_INDICES ELEMENT_SIZE_d SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x93   VL256 V66 V0F38   W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:f32   MEM0:r:d:f32 REG1=XMM_N():rw:dq:i32
+IFORM: VGATHERQPS_XMMf32_MEMf32_XMMi32_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x93   VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:q:f32   MEM0:r:d:f32 REG1=XMM_N():rw:q:i32
+IFORM: VGATHERQPS_XMMf32_MEMf32_XMMi32_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+
+{
+ICLASS    : VPGATHERDQ
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather DWORD_INDICES ELEMENT_SIZE_q SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x90   VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:u64   MEM0:r:q:u64 REG1=YMM_N():rw:qq:i64
+IFORM: VPGATHERDQ_YMMu64_MEMq_YMMi64_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x90   VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:u64   MEM0:r:q:u64 REG1=XMM_N():rw:dq:i64
+IFORM: VPGATHERDQ_XMMu64_MEMq_XMMi64_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VPGATHERDD
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather DWORD_INDICES ELEMENT_SIZE_d SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x90   VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:u32   MEM0:r:d:u32 REG1=YMM_N():rw:qq:i32
+IFORM: VPGATHERDD_YMMu32_MEMd_YMMi32_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x90   VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:u32   MEM0:r:d:u32 REG1=XMM_N():rw:dq:i32
+IFORM: VPGATHERDD_XMMu32_MEMd_XMMi32_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VPGATHERQQ
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather QWORD_INDICES ELEMENT_SIZE_q SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x91   VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=YMM_R():crw:qq:u64   MEM0:r:q:u64 REG1=YMM_N():rw:qq:i64
+IFORM: VPGATHERQQ_YMMu64_MEMq_YMMi64_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x91   VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:u64   MEM0:r:q:u64 REG1=XMM_N():rw:dq:i64
+IFORM: VPGATHERQQ_XMMu64_MEMq_XMMi64_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+{
+ICLASS    : VPGATHERQD
+CPL       : 3
+CATEGORY  : AVX2GATHER
+EXTENSION : AVX2GATHER
+ATTRIBUTES : gather QWORD_INDICES ELEMENT_SIZE_d SPECIAL_AGEN_REQUIRED
+EXCEPTIONS: avx-type-12
+
+# VL = 256 - when data/mask differ from index size see asterisks in above chart.
+PATTERN : VV1 0x91   VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_YMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:dq:u32   MEM0:r:d:u32 REG1=XMM_N():rw:dq:i32
+IFORM: VPGATHERQD_XMMu32_MEMd_XMMi32_VL256
+
+# VL = 128 - index, mask and dest are all XMMs
+PATTERN : VV1 0x91   VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] RM=4 VMODRM_XMM() eanot16
+OPERANDS  : REG0=XMM_R():crw:q:u32   MEM0:r:d:u32 REG1=XMM_N():rw:q:i32
+IFORM: VPGATHERQD_XMMu32_MEMd_XMMi32_VL128
+
+COMMENT: mask reg is zeroized on normal termination. mask_sz=data_sz
+}
+
+
+
+###FILE: ./datafiles/avxhsw/hsw-int256-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+
+{
+ICLASS    : VPABSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x1C   VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x1C   VL256 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8  REG1=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPABSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x1D   VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x1D   VL256 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16  REG1=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPABSD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x1E   VL256 V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x1E   VL256 V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32  REG1=YMM_B():r:qq:i32
+}
+
+
+
+
+
+
+
+
+
+{
+ICLASS    : VPACKSSWB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x63  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x63  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPACKSSDW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x6B  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x6B  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPACKUSWB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x67  V66 V0F VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x67  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPACKUSDW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x2B  V66 V0F38 VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x2B  V66 V0F38 VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VPSLLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF1  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF1  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=XMM_B():r:q:u64
+}
+{
+ICLASS    : VPSLLD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF2  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF2  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=XMM_B():r:q:u64
+}
+{
+ICLASS    : VPSLLQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF3  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0xF3  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=XMM_B():r:q:u64
+}
+
+{
+ICLASS    : VPSRLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD1  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD1  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=XMM_B():r:q:u64
+}
+{
+ICLASS    : VPSRLD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD2  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD2  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=XMM_B():r:q:u64
+}
+{
+ICLASS    : VPSRLQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD3  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:dq:u64
+
+PATTERN : VV1 0xD3  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=XMM_B():r:q:u64
+}
+
+{
+ICLASS    : VPSRAW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE1  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:dq:u64
+
+PATTERN : VV1 0xE1  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=XMM_B():r:q:u64
+}
+{
+ICLASS    : VPSRAD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE2  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:dq:u64
+
+PATTERN : VV1 0xE2  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=XMM_B():r:q:u64
+}
+
+
+{
+ICLASS    : VPADDB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xFC  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0xFC  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPADDW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xFD  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xFD  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPADDD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xFE  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0xFE  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPADDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD4  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 MEM0:r:qq:i64
+
+PATTERN : VV1 0xD4  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 REG2=YMM_B():r:qq:i64
+}
+
+{
+ICLASS    : VPADDSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xEC  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0xEC  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPADDSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xED  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xED  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+
+{
+ICLASS    : VPADDUSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDC  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xDC  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPADDUSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDD  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0xDD  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+
+{
+ICLASS    : VPAVGB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE0  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xE0  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPAVGW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE3  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0xE3  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+
+
+{
+ICLASS    : VPCMPEQB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x74  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0x74  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPCMPEQW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x75  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0x75  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPCMPEQD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x76  V66 V0F VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x76  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+{
+ICLASS    : VPCMPEQQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x29  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x29  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+{
+ICLASS    : VPCMPGTB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x64  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x64  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPCMPGTW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x65  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x65  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPCMPGTD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x66  V66 V0F VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x66  V66 V0F VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPCMPGTQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x37  V66 V0F38 VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 MEM0:r:qq:i64
+
+PATTERN : VV1 0x37  V66 V0F38 VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 REG2=YMM_B():r:qq:i64
+}
+
+
+{
+ICLASS    : VPHADDW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x01  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x01  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPHADDD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x02  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x02  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPHADDSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x03  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x03  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPHSUBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x05  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x05  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPHSUBD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x06  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x06  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPHSUBSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x07  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x07  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+
+{
+ICLASS    : VPMADDWD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF5  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xF5  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPMADDUBSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x04  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:u8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x04  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:i8
+}
+
+{
+ICLASS    : VPMAXSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3C  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x3C  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPMAXSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xEE  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xEE  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPMAXSD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3D  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x3D  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VPMAXUB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDE  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xDE  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPMAXUW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3E  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0x3E  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPMAXUD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3F  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x3F  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+
+{
+ICLASS    : VPMINSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x38  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x38  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPMINSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xEA  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xEA  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPMINSD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x39  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x39  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VPMINUB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDA  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xDA  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPMINUW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3A  V66 V0F38 VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0x3A  V66 V0F38 VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPMINUD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x3B  V66 V0F38 VL256  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x3B  V66 V0F38 VL256 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+
+{
+ICLASS    : VPMULHUW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE4  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0xE4  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPMULHRSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x0B  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x0B  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+
+{
+ICLASS    : VPMULHW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE5  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xE5  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPMULLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD5  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xD5  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPMULLD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x40  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x40  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VPMULUDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF4  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0xF4  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+{
+ICLASS    : VPMULDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x28  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x28  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+{
+ICLASS    : VPSADBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF6  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xF6  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPSHUFB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x00  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0x00  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+
+{
+ICLASS    : VPSIGNB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x08  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0x08  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPSIGNW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x09  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0x09  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPSIGND
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x0A  VL256 V66 V0F38  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0x0A  VL256 V66 V0F38 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+
+
+{
+ICLASS    : VPSUBSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE8  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0xE8  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPSUBSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xE9  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xE9  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+
+{
+ICLASS    : VPSUBUSB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD8  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0xD8  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPSUBUSW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xD9  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0xD9  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+
+{
+ICLASS    : VPSUBB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF8  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 MEM0:r:qq:i8
+
+PATTERN : VV1 0xF8  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i8 REG1=YMM_N():r:qq:i8 REG2=YMM_B():r:qq:i8
+}
+{
+ICLASS    : VPSUBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xF9  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 MEM0:r:qq:i16
+
+PATTERN : VV1 0xF9  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16 REG1=YMM_N():r:qq:i16 REG2=YMM_B():r:qq:i16
+}
+{
+ICLASS    : VPSUBD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xFA  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 MEM0:r:qq:i32
+
+PATTERN : VV1 0xFA  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32 REG1=YMM_N():r:qq:i32 REG2=YMM_B():r:qq:i32
+}
+{
+ICLASS    : VPSUBQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xFB  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 MEM0:r:qq:i64
+
+PATTERN : VV1 0xFB  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64 REG1=YMM_N():r:qq:i64 REG2=YMM_B():r:qq:i64
+}
+
+{
+ICLASS    : VPUNPCKHBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x68  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0x68  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPUNPCKHWD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x69  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0x69  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPUNPCKHDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x6A  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x6A  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+{
+ICLASS    : VPUNPCKHQDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x6D  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x6D  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+{
+ICLASS    : VPUNPCKLBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x60  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+
+PATTERN : VV1 0x60  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+}
+{
+ICLASS    : VPUNPCKLWD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x61  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16
+
+PATTERN : VV1 0x61  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16
+}
+{
+ICLASS    : VPUNPCKLDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x62  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 MEM0:r:qq:u32
+
+PATTERN : VV1 0x62  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_N():r:qq:u32 REG2=YMM_B():r:qq:u32
+}
+{
+ICLASS    : VPUNPCKLQDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x6C  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64
+
+PATTERN : VV1 0x6C  VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64
+}
+
+
+{
+ICLASS    : VPALIGNR
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x0F  VL256 V66 V0F3A  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+
+PATTERN : VV1 0x0F  VL256 V66 V0F3A  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8 IMM0:r:b
+}
+{
+ICLASS    : VPBLENDW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x0E  VL256 V66 V0F3A  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 MEM0:r:qq:u16 IMM0:r:b
+
+PATTERN : VV1 0x0E  VL256 V66 V0F3A  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u16 REG2=YMM_B():r:qq:u16 IMM0:r:b
+}
+{
+ICLASS    : VMPSADBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x42  VL256 V66 V0F3A  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+
+PATTERN : VV1 0x42  VL256 V66 V0F3A  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VPOR
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xEB  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 MEM0:r:qq:u256
+
+PATTERN : VV1 0xEB   VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 REG2=YMM_B():r:qq:u256
+}
+{
+ICLASS    : VPAND
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDB  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 MEM0:r:qq:u256
+
+PATTERN : VV1 0xDB   VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 REG2=YMM_B():r:qq:u256
+}
+{
+ICLASS    : VPANDN
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xDF  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 MEM0:r:qq:u256
+
+PATTERN : VV1 0xDF   VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 REG2=YMM_B():r:qq:u256
+}
+{
+ICLASS    : VPXOR
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0xEF  VL256 V66 V0F  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 MEM0:r:qq:u256
+
+PATTERN : VV1 0xEF   VL256 V66 V0F MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u256 REG1=YMM_N():r:qq:u256 REG2=YMM_B():r:qq:u256
+}
+
+
+
+{
+ICLASS    : VPBLENDVB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x4C   VL256 V66 V0F3A norexw_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8 REG2=YMM_SE():r:qq:u8
+
+PATTERN : VV1 0x4C   VL256 V66 V0F3A norexw_prefix MOD[0b11] MOD=3 REG[rrr] RM[nnn] SE_IMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8 REG3=YMM_SE():r:qq:u8
+}
+
+
+
+
+{
+ICLASS    : VPMOVMSKB
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0xD7  VL256 V66 V0F  NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPR32_R():w:d:u32   REG1=YMM_B():r:qq:i8
+}
+
+
+
+{
+ICLASS    : VPSHUFD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x70   VL256 V66 V0F NOVSR   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u32 MEM0:r:qq:u32  IMM0:r:b
+
+PATTERN : VV1 0x70   VL256 V66 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u32 REG1=YMM_B():r:qq:u32 IMM0:r:b
+}
+{
+ICLASS    : VPSHUFHW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x70   VL256 VF3 V0F NOVSR   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 MEM0:r:qq:u16  IMM0:r:b
+
+PATTERN : VV1 0x70   VL256 VF3 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_B():r:qq:u16 IMM0:r:b
+}
+{
+ICLASS    : VPSHUFLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x70   VL256 VF2 V0F NOVSR   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 MEM0:r:qq:u16  IMM0:r:b
+
+PATTERN : VV1 0x70   VL256 VF2 V0F NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u16 REG1=YMM_B():r:qq:u16 IMM0:r:b
+}
+
+
+
+{
+ICLASS    : VPSRLDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x73  VL256 V66 V0F   MOD[0b11] MOD=3 REG[0b011] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u128 REG1=YMM_B():r:qq:u128 IMM0:r:b   # NDD
+}
+{
+ICLASS    : VPSLLDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x73  VL256 V66 V0F   MOD[0b11] MOD=3 REG[0b111] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u128 REG1=YMM_B():r:qq:u128 IMM0:r:b   # NDD
+}
+
+##############################################
+
+{
+ICLASS    : VPSLLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x71   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u16 REG1=YMM_B():r:qq:u16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSLLD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x72   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u32 REG1=YMM_B():r:qq:u32 IMM0:r:b  #NDD
+}
+{
+ICLASS    : VPSLLQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x73   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b110] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u64 REG1=YMM_B():r:qq:u64 IMM0:r:b # NDD
+}
+
+{
+ICLASS    : VPSRAW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x71   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b100] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:i16 REG1=YMM_B():r:qq:i16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRAD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x72   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b100] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:i32 REG1=YMM_B():r:qq:i32 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x71   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u16 REG1=YMM_B():r:qq:u16 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+
+PATTERN : VV1 0x72   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u32 REG1=YMM_B():r:qq:u32 IMM0:r:b # NDD
+}
+{
+ICLASS    : VPSRLQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-7
+PATTERN : VV1 0x73   VL256  V66 V0F MOD[0b11] MOD=3 REG[0b010] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_N():w:qq:u64 REG1=YMM_B():r:qq:u64 IMM0:r:b  # NDD
+}
+
+
+
+############################################################################
+# SX versions
+############################################################################
+
+{
+ICLASS    : VPMOVSXBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x20   VL256  V66 V0F38 NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i16   REG1=XMM_B():r:dq:i8
+PATTERN : VV1 0x20   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i16   MEM0:r:dq:i8
+}
+
+############################################################################
+{
+ICLASS    : VPMOVSXBD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x21   VL256  V66 V0F38 NOVSR  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32   REG1=XMM_B():r:q:i8
+PATTERN : VV1 0x21   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32   MEM0:r:q:i8
+}
+############################################################################
+{
+ICLASS    : VPMOVSXBQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x22   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64   REG1=XMM_B():r:d:i8
+PATTERN : VV1 0x22   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64   MEM0:r:d:i8
+}
+############################################################################
+{
+ICLASS    : VPMOVSXWD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x23   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i32   REG1=XMM_B():r:dq:i16
+PATTERN : VV1 0x23   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i32   MEM0:r:dq:i16
+}
+############################################################################
+{
+ICLASS    : VPMOVSXWQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x24   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64   REG1=XMM_B():r:q:i16
+PATTERN : VV1 0x24   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64   MEM0:r:q:i16
+}
+############################################################################
+{
+ICLASS    : VPMOVSXDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x25   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:i64   REG1=XMM_B():r:dq:i32
+PATTERN : VV1 0x25   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:i64   MEM0:r:dq:i32
+}
+
+
+
+
+
+############################################################################
+# ZX versions
+############################################################################
+
+{
+ICLASS    : VPMOVZXBW
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x30   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16   REG1=XMM_B():r:dq:u8
+PATTERN : VV1 0x30   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16   MEM0:r:dq:u8
+}
+
+############################################################################
+{
+ICLASS    : VPMOVZXBD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x31   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32   REG1=XMM_B():r:q:u8
+PATTERN : VV1 0x31   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32   MEM0:r:q:u8
+}
+############################################################################
+{
+ICLASS    : VPMOVZXBQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x32   V66  V0F38 VL256 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64  REG1=XMM_B():r:d:u8
+PATTERN : VV1 0x32   V66  V0F38 VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64   MEM0:r:d:u8
+}
+############################################################################
+{
+ICLASS    : VPMOVZXWD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x33   V66  V0F38 VL256 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32   REG1=XMM_B():r:dq:u16
+PATTERN : VV1 0x33   V66  V0F38 VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32   MEM0:r:dq:u16
+}
+############################################################################
+{
+ICLASS    : VPMOVZXWQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x34   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64   REG1=XMM_B():r:q:u16
+PATTERN : VV1 0x34   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64   MEM0:r:q:u16
+}
+############################################################################
+{
+ICLASS    : VPMOVZXDQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-5
+PATTERN : VV1 0x35   VL256  V66 V0F38 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64   REG1=XMM_B():r:dq:u32
+PATTERN : VV1 0x35   VL256  V66 V0F38 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64   MEM0:r:dq:u32
+}
+
+
+##################################
+# newer stuff 2009-08-14
+
+
+{
+ICLASS    : VINSERTI128
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+PATTERN : VV1 0x38  VL256 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 MEM0:r:dq:u128 IMM0:r:b
+
+PATTERN : VV1 0x38  VL256 V66 V0F3A W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 REG2=XMM_B():r:dq:u128 IMM0:r:b
+}
+
+
+
+
+
+{
+ICLASS    : VEXTRACTI128
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+PATTERN : VV1 0x39  VL256 V66 V0F3A W0  NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : MEM0:w:dq:u128 REG0=YMM_R():r:qq:u128  IMM0:r:b
+
+PATTERN : VV1 0x39  VL256 V66 V0F3A W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_B():w:dq:u128 REG1=YMM_R():r:qq:u128  IMM0:r:b
+}
+
+
+###########################################################################
+
+### # VPMASKMOVD  masked load and store
+### # VPMASKMOVQ  masked load and store
+
+
+
+
+{
+ICLASS    : VPMASKMOVD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+ATTRIBUTES: maskop
+EXCEPTIONS: avx-type-6
+PATTERN : VV1 0x8C  VL128 V66 V0F38 W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=XMM_R():w:dq:u32  REG1=XMM_N():r:dq:u32  MEM0:r:dq:u32
+
+
+PATTERN : VV1 0x8C  VL256 V66 V0F38 W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=YMM_R():w:qq:u32  REG1=YMM_N():r:qq:u32  MEM0:r:qq:u32
+}
+{
+ICLASS    : VPMASKMOVQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+ATTRIBUTES: maskop
+EXCEPTIONS: avx-type-6
+
+PATTERN : VV1 0x8C  VL128 V66 V0F38 W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=XMM_R():w:dq:u64  REG1=XMM_N():r:dq:u64  MEM0:r:dq:u64
+
+
+PATTERN : VV1 0x8C  VL256 V66 V0F38 W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=YMM_R():w:qq:u64  REG1=YMM_N():r:qq:u64  MEM0:r:qq:u64
+}
+
+{
+ICLASS    : VPMASKMOVD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+ATTRIBUTES: maskop
+EXCEPTIONS: avx-type-6
+PATTERN : VV1 0x8E  VL128 V66 V0F38 W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS :  MEM0:w:dq:u32  REG0=XMM_N():r:dq:u32  REG1=XMM_R():r:dq:u32
+
+
+PATTERN : VV1 0x8E  VL256 V66 V0F38 W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:qq:u32  REG0=YMM_N():r:qq:u32  REG1=YMM_R():r:qq:u32
+}
+{
+ICLASS    : VPMASKMOVQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+ATTRIBUTES: maskop
+EXCEPTIONS: avx-type-6
+PATTERN : VV1 0x8E  VL128 V66 V0F38 W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS :  MEM0:w:dq:u64  REG0=XMM_N():r:dq:u64  REG1=XMM_R():r:dq:u64
+
+
+PATTERN : VV1 0x8E  VL256 V66 V0F38 W1  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : MEM0:w:qq:u64  REG0=YMM_N():r:qq:u64  REG1=YMM_R():r:qq:u64
+}
+###########################################################################
+
+
+### # VPERM2I128 256b only
+
+{
+ICLASS    : VPERM2I128
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6 # Note: vperm2f128 is type 4...
+
+PATTERN : VV1 0x46  VL256 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u128  REG1=YMM_N():r:qq:u128  MEM0:r:qq:u128         IMM0:r:b
+
+PATTERN : VV1 0x46  VL256 V66 V0F3A W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u128  REG1=YMM_N():r:qq:u128  REG2=YMM_B():r:qq:u128 IMM0:r:b
+}
+
+
+{
+ICLASS    : VPERMQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+
+PATTERN : VV1 0x00 VL256 V0F3A V66 W1 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u64 MEM0:r:qq:u64  IMM0:r:b
+
+PATTERN : VV1 0x00 VL256 V0F3A V66 W1 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u64 REG1=YMM_B():r:qq:u64 IMM0:r:b
+}
+
+{
+ICLASS    : VPERMPD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+
+PATTERN : VV1 0x01 VL256 V0F3A V66 W1 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 MEM0:r:qq:f64  IMM0:r:b
+
+PATTERN : VV1 0x01 VL256 V0F3A V66 W1 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:f64 REG1=YMM_B():r:qq:f64 IMM0:r:b
+}
+
+
+
+
+
+
+
+
+{
+ICLASS    : VPERMD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+
+
+PATTERN : VV1 0x36  VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=YMM_R():w:qq:u32  REG1=YMM_N():r:qq:u32  MEM0:r:qq:u32
+
+PATTERN : VV1 0x36  VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS : REG0=YMM_R():w:qq:u32  REG1=YMM_N():r:qq:u32  REG2=YMM_B():r:qq:u32
+}
+{
+ICLASS    : VPERMPS
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+
+PATTERN : VV1 0x16  VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS : REG0=YMM_R():w:qq:f32  REG1=YMM_N():r:qq:f32  MEM0:r:qq:f32
+
+PATTERN : VV1 0x16  VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS : REG0=YMM_R():w:qq:f32  REG1=YMM_N():r:qq:f32  REG2=YMM_B():r:qq:f32
+}
+
+
+###########################################################################
+
+
+### # VPBLENDD imm 128/256
+
+
+
+{
+ICLASS    : VPBLENDD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+
+PATTERN : VV1 0x02  VL128 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32  REG1=XMM_N():r:dq:u32  MEM0:r:dq:u32         IMM0:r:b
+
+PATTERN : VV1 0x02  VL128 V66 V0F3A W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=XMM_R():w:dq:u32  REG1=XMM_N():r:dq:u32  REG2=XMM_B():r:dq:u32 IMM0:r:b
+
+
+PATTERN : VV1 0x02  VL256 V66 V0F3A W0  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u32  REG1=YMM_N():r:qq:u32  MEM0:r:qq:u32         IMM0:r:b
+
+PATTERN : VV1 0x02  VL256 V66 V0F3A W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
+OPERANDS  : REG0=YMM_R():w:qq:u32  REG1=YMM_N():r:qq:u32  REG2=YMM_B():r:qq:u32 IMM0:r:b
+}
+
+
+
+###########################################################################
+
+{
+ICLASS    : VPBROADCASTB
+COMMENT : gpr 128/256
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+
+PATTERN : VV1 0x78 VL128 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u8 MEM0:r:b:u8 EMX_BROADCAST_1TO16_8
+
+PATTERN : VV1 0x78 VL128 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u8  REG1=XMM_B():r:b:u8 EMX_BROADCAST_1TO16_8
+
+PATTERN : VV1 0x78 VL256 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u8 MEM0:r:b:u8 EMX_BROADCAST_1TO32_8
+
+PATTERN : VV1 0x78 VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u8  REG1=XMM_B():r:b:u8 EMX_BROADCAST_1TO32_8
+
+}
+
+
+
+
+{
+ICLASS    : VPBROADCASTW
+COMMENT : gpr 128/256
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+
+PATTERN : VV1 0x79 VL128 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u16 MEM0:r:w:u16 EMX_BROADCAST_1TO8_16
+
+PATTERN : VV1 0x79 VL128 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u16  REG1=XMM_B():r:w:u16  EMX_BROADCAST_1TO8_16
+
+PATTERN : VV1 0x79 VL256 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u16 MEM0:r:w:u16 EMX_BROADCAST_1TO16_16
+
+PATTERN : VV1 0x79 VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u16  REG1=XMM_B():r:w:u16 EMX_BROADCAST_1TO16_16
+}
+
+
+
+
+### # VPBROADCASTD gpr/mem
+
+
+{
+ICLASS    : VPBROADCASTD
+COMMENT : gpr 128/256
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+
+PATTERN : VV1 0x58 VL128 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u32 MEM0:r:d:u32 EMX_BROADCAST_1TO4_32
+
+PATTERN : VV1 0x58 VL128 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u32  REG1=XMM_B():r:d:u32  EMX_BROADCAST_1TO4_32
+
+
+PATTERN : VV1 0x58 VL256 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u32 MEM0:r:d:u32  EMX_BROADCAST_1TO8_32
+
+PATTERN : VV1 0x58 VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u32  REG1=XMM_B():r:d:u32 EMX_BROADCAST_1TO8_32
+}
+
+
+
+### # VPBROADCASTQ gpr/mem
+
+{
+ICLASS    : VPBROADCASTQ
+COMMENT : gpr 128/256
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+
+PATTERN : VV1 0x59 VL128 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq:u64 MEM0:r:q:u64 EMX_BROADCAST_1TO2_64
+
+PATTERN : VV1 0x59 VL128 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:u64  REG1=XMM_B():r:q:u64  EMX_BROADCAST_1TO2_64
+
+PATTERN : VV1 0x59 VL256 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u64 MEM0:r:q:u64 EMX_BROADCAST_1TO4_64
+
+PATTERN : VV1 0x59 VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:u64  REG1=XMM_B():r:q:u64  EMX_BROADCAST_1TO4_64
+}
+
+
+
+
+
+
+{
+ICLASS    : VBROADCASTSS
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+COMMENT   : xmm,xmm and ymm,xmm
+PATTERN : VV1 0x18  VL128 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq:f32  REG1=XMM_B():r:dq:f32 EMX_BROADCAST_1TO4_32
+
+PATTERN : VV1 0x18  VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f32  REG1=XMM_B():r:dq:f32 EMX_BROADCAST_1TO8_32
+}
+
+
+{
+ICLASS    : VBROADCASTSD
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+COMMENT   : ymm,xmm only
+PATTERN : VV1 0x19  VL256 V66 V0F38 W0 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq:f64  REG1=XMM_B():r:dq:f64 EMX_BROADCAST_1TO4_64
+}
+
+
+
+{
+ICLASS    : VBROADCASTI128
+CPL       : 3
+CATEGORY  : BROADCAST
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-6
+COMMENT : memonly 256  -- FIXME: make types u64 like in AVX1?
+PATTERN : VV1 0x5A VL256 V66 V0F38 W0 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq:u128  MEM0:r:dq:u128 EMX_BROADCAST_2TO4_64
+}
+
+
+###FILE: ./datafiles/avxhsw/hsw-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : TZCNT
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-u sf-u zf-mod af-u pf-u cf-mod ]
+PATTERN   : 0x0F 0xBC refining_f3  TZCNT=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():w MEM0:r:v
+
+PATTERN   : 0x0F 0xBC refining_f3 TZCNT=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():w  REG1=GPRv_B():r
+}
+
+{
+ICLASS    : BSF
+VERSION   : 1
+COMMENT   : AMD reused 0FBC for TZCNT and made BSF not have a refining prefix.  This version replaces the normal version of BSF
+CPL       : 3
+CATEGORY  : BITBYTE
+EXTENSION : BASE
+ISA_SET   : I386
+FLAGS     : MUST [ of-u sf-u zf-mod af-u pf-u cf-u ]
+
+PATTERN   : 0x0F 0xBC not_refining_f3 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():cw MEM0:r:v
+
+PATTERN   : 0x0F 0xBC not_refining_f3 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():cw REG1=GPRv_B():r
+
+PATTERN   : 0x0F 0xBC refining_f3 TZCNT=0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():cw MEM0:r:v
+
+PATTERN   : 0x0F 0xBC refining_f3 TZCNT=0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():cw REG1=GPRv_B():r
+}
+
+{
+ICLASS    : INVPCID
+CPL       : 0
+CATEGORY  : MISC
+EXTENSION : INVPCID
+ISA_SET   : INVPCID
+ATTRIBUTES : RING0 NOTSX
+PATTERN   : 0x0F 0x38 0x82 osz_refining_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn]  REFINING66() mode64 MODRM() CR_WIDTH()
+OPERANDS  : REG0=GPR64_R():r MEM0:r:dq
+PATTERN   : 0x0F 0x38 0x82 osz_refining_prefix MOD[mm] MOD!=3 REG[rrr] RM[nnn]  REFINING66() not64 MODRM() CR_WIDTH()
+OPERANDS  : REG0=GPR32_R():r MEM0:r:dq
+COMMENT   :
+}
+
+
+###FILE: ./datafiles/avxhsw/hsw-lzcnt.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+# LZCNT reg16, reg/mem16 F30FBD /r
+# LZCNT reg32, reg/mem32 F30FBD /r
+# LZCNT reg64, reg/mem64 F30FBD /r
+
+{
+ICLASS    : LZCNT
+# This replace the AMD version in LZCNT builds
+VERSION   : 2
+CPL       : 3
+CATEGORY  : LZCNT
+EXTENSION : LZCNT
+COMMENT:  : These next one WAS introduced first by AMD circa SSE4a.
+FLAGS     : MUST [ cf-mod zf-mod of-u af-u pf-u sf-u ]
+PATTERN   : 0x0F 0xBD f3_refining_prefix LZCNT=1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():w:v     MEM0:r:v
+PATTERN   : 0x0F 0xBD f3_refining_prefix LZCNT=1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():w:v     REG1=GPRv_B():r:v
+}
+
+
+{
+ICLASS    : BSR
+VERSION   : 2
+COMMENT   : AMD reused 0FBD for LZCNT and made BSR not have a refining prefix.  This version replaces the normal version of BSR
+CPL       : 3
+CATEGORY  : BITBYTE
+EXTENSION : BASE
+ISA_SET   : I386
+FLAGS     : MUST [ of-u sf-u zf-mod af-u pf-u cf-u ]
+PATTERN   : 0x0F 0xBD not_refining_f3 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():cw MEM0:r:v
+
+PATTERN   : 0x0F 0xBD not_refining_f3 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():cw REG1=GPRv_B():r
+
+PATTERN   : 0x0F 0xBD  refining_f3 LZCNT=0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=GPRv_R():cw MEM0:r:v
+
+PATTERN   : 0x0F 0xBD  refining_f3 LZCNT=0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=GPRv_R():cw REG1=GPRv_B():r
+}
+
+
+###FILE: ./datafiles/avxhsw/hsw-vex-gpr-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+AVX_INSTRUCTIONS()::
+
+{
+ICLASS    : PDEP
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+#32b
+PATTERN   : VV1 0xF5 V0F38 VF2 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF2 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF2 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF2 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+# 64b
+PATTERN   : VV1 0xF5 V0F38 VF2 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q MEM0:r:q
+
+PATTERN   : VV1 0xF5 V0F38 VF2 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
+}
+
+{
+ICLASS    : PEXT
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+
+#32b
+PATTERN   : VV1 0xF5 V0F38 VF3 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF3 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF3 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF5 V0F38 VF3 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+# 64b
+PATTERN   : VV1 0xF5 V0F38 VF3 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q MEM0:r:q
+
+PATTERN   : VV1 0xF5 V0F38 VF3 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
+}
+
+
+{
+ICLASS    : ANDN
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-0 ]
+
+# 32b
+PATTERN   : VV1 0xF2 V0F38 VNP  not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF2 V0F38 VNP  W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d MEM0:r:d
+
+PATTERN   : VV1 0xF2 V0F38 VNP  not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF2 V0F38 VNP  W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():rw:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
+
+# 64b
+PATTERN   : VV1 0xF2 V0F38 VNP W1 VL128  mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q MEM0:r:q
+
+PATTERN   : VV1 0xF2 V0F38 VNP W1 VL128  mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():rw:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
+}
+
+{
+ICLASS    : BLSR
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-mod ]
+
+# 32b
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128  MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128  MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128  MOD[0b11] MOD=3 REG[0b001] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128  MOD[0b11] MOD=3 REG[0b001] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+# 64b
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_N():w:q MEM0:r:q
+
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b001] RM[nnn]
+OPERANDS  : REG0=VGPR64_N():w:q  REG1=VGPR64_B():r:q
+
+}
+
+{
+ICLASS    : BLSMSK
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-0 sf-mod zf-0 af-u pf-u cf-mod ]
+
+#32b
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+#64b
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_N():w:q MEM0:r:q
+
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
+OPERANDS  : REG0=VGPR64_N():w:q  REG1=VGPR64_B():r:q
+}
+
+{
+ICLASS    : BLSI
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-0 ]
+
+# 32b
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_N():w:d MEM0:r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+PATTERN   : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
+OPERANDS  : REG0=VGPR32_N():w:d  REG1=VGPR32_B():r:d
+
+# 64b
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_N():w:q MEM0:r:q
+
+PATTERN   : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
+OPERANDS  : REG0=VGPR64_N():w:q  REG1=VGPR64_B():r:q
+}
+
+{
+ICLASS    : BZHI
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+FLAGS     : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-mod ]
+
+# 32b
+PATTERN   : VV1 0xF5 V0F38 VNP not64 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF5 V0F38 VNP W0 mode64 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF5 V0F38 VNP not64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF5 V0F38 VNP W0 mode64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+# 64b
+PATTERN   : VV1 0xF5 V0F38 VNP W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q  MEM0:r:q REG1=VGPR64_N():r:q
+
+PATTERN   : VV1 0xF5 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
+}
+
+{
+ICLASS    : BEXTR
+CPL       : 3
+CATEGORY  : BMI1
+EXTENSION : BMI1
+FLAGS     : MUST [ of-u sf-u zf-mod af-u pf-u cf-u ]
+
+# 32b
+PATTERN   : VV1 0xF7 V0F38 VNP not64 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VNP W0 mode64 VL128  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VNP not64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VNP W0 mode64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+# 64b
+PATTERN   : VV1 0xF7 V0F38 VNP W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q  MEM0:r:q REG1=VGPR64_N():r:q
+
+PATTERN   : VV1 0xF7 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
+}
+
+
+
+{
+ICLASS    : SHLX
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+# 32b
+PATTERN   : VV1 0xF7 V0F38 V66 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 V66 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 V66 not64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 V66 W0 mode64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+# 64b
+PATTERN   : VV1 0xF7 V0F38 V66  W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
+
+PATTERN   : VV1 0xF7 V0F38 V66  W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q  REG2=VGPR64_N():r:q
+}
+{
+ICLASS    : SARX
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+# 32b
+PATTERN   : VV1 0xF7 V0F38 VF3 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF3 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF3 not64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF3 W0 mode64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+# 64b
+PATTERN   : VV1 0xF7 V0F38 VF3  W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
+
+PATTERN   : VV1 0xF7 V0F38 VF3  W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q  REG2=VGPR64_N():r:q
+}
+{
+ICLASS    : SHRX
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+# 32b
+PATTERN   : VV1 0xF7 V0F38 VF2 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF2 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF2 not64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+PATTERN   : VV1 0xF7 V0F38 VF2 W0 mode64 VL128  MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
+
+# 64b
+PATTERN   : VV1 0xF7 V0F38 VF2  W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
+
+PATTERN   : VV1 0xF7 V0F38 VF2  W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q  REG2=VGPR64_N():r:q
+}
+
+
+
+{
+ICLASS    : MULX
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+# reg:w vvvv:w rm:r rdx:r
+# 32b
+PATTERN   : VV1 0xF6 VF2 V0F38 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d REG2=VGPR32_B():r:d REG3=XED_REG_EDX:r:SUPP
+
+PATTERN   : VV1 0xF6 VF2 V0F38 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d REG2=VGPR32_B():r:d REG3=XED_REG_EDX:r:SUPP
+PATTERN   : VV1 0xF6 VF2 V0F38 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d MEM0:r:d  REG2=XED_REG_EDX:r:SUPP
+
+PATTERN   : VV1 0xF6 VF2 V0F38 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d MEM0:r:d  REG2=XED_REG_EDX:r:SUPP
+
+# 64b
+PATTERN   : VV1 0xF6 VF2 V0F38 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_N():w:q REG2=VGPR64_B():r:q REG3=XED_REG_RDX:r:SUPP
+PATTERN   : VV1 0xF6 VF2 V0F38 W1 VL128 mode64  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_N():w:q MEM0:r:q REG2=XED_REG_RDX:r:SUPP
+}
+
+{
+ICLASS    : RORX
+CPL       : 3
+CATEGORY  : BMI2
+EXTENSION : BMI2
+
+# reg(w) rm(r) / vvvv must be 1111. / 2010-01-08 CART change
+
+# 32b
+PATTERN   : VV1 0xF0 VF2 V0F3A not64 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]  UIMM8()
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d IMM0:r:b
+
+PATTERN   : VV1 0xF0 VF2 V0F3A W0 mode64 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn]  UIMM8()
+OPERANDS  : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d IMM0:r:b
+PATTERN   : VV1 0xF0 VF2 V0F3A not64 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() UIMM8()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d IMM0:r:b
+
+PATTERN   : VV1 0xF0 VF2 V0F3A W0 mode64 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() UIMM8()
+OPERANDS  : REG0=VGPR32_R():w:d MEM0:r:d IMM0:r:b
+
+# 64b
+PATTERN   : VV1 0xF0 VF2 V0F3A W1 VL128 NOVSR mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]  UIMM8()
+OPERANDS  : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q IMM0:r:b
+PATTERN   : VV1 0xF0 VF2 V0F3A W1 VL128 NOVSR mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() UIMM8()
+OPERANDS  : REG0=VGPR64_R():w:q MEM0:r:q IMM0:r:b
+}
+
+
+###FILE: ./datafiles/avxhsw/hsw-vshift-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+
+
+
+{
+ICLASS    : VPSLLVD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x47  VL128 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x47  VL128 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x47  VL256 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x47  VL256 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+
+}
+{
+ICLASS    : VPSLLVQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x47  VL128 V0F38 V66  W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x47  VL128 V0F38 V66 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x47  VL256 V0F38 V66  W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x47  VL256 V0F38 V66 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+
+}
+
+{
+ICLASS    : VPSRLVD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x45  VL128 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x45  VL128 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x45  VL256 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x45  VL256 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+
+}
+{
+ICLASS    : VPSRLVQ
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x45  VL128 V0F38 V66  W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x45  VL128 V0F38 V66 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x45  VL256 V0F38 V66  W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x45  VL256 V0F38 V66 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+
+}
+
+{
+ICLASS    : VPSRAVD
+CPL       : 3
+CATEGORY  : AVX2
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-4
+PATTERN : VV1 0x46  VL128 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq MEM0:r:dq
+
+PATTERN : VV1 0x46  VL128 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():w:dq REG1=XMM_N():r:dq REG2=XMM_B():r:dq
+
+PATTERN : VV1 0x46  VL256 V0F38 V66  W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq MEM0:r:qq
+
+PATTERN : VV1 0x46  VL256 V0F38 V66 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():w:qq REG1=YMM_N():r:qq REG2=YMM_B():r:qq
+
+}
+
+
+
+
+###FILE: ./datafiles/avxhsw/movnt-load-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+
+{
+ICLASS    : VMOVNTDQA
+CPL       : 3
+CATEGORY  : DATAXFER
+EXTENSION : AVX2
+EXCEPTIONS: avx-type-1
+ATTRIBUTES :  REQUIRES_ALIGNMENT NOTSX NONTEMPORAL
+
+PATTERN : VV1 0x2A  V66 V0F38 VL256 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  :  REG0=YMM_R():w:qq MEM0:r:qq
+}
+
+
+
+
+
+###FILE: ./datafiles/avxhsw/vmfunc-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : VMFUNC
+CPL       : 3
+CATEGORY  : VTX
+EXTENSION : VMFUNC
+ISA_SET   : VMFUNC
+ATTRIBUTES :
+PATTERN   : 0x0F 0x01 MOD[0b11] MOD=3 REG[0b010] RM[0b100] no_refining_prefix
+OPERANDS  : REG0=XED_REG_EAX:r:SUPP
+}
+
+
+###FILE: ./datafiles/avxhsw/rtm.xed
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : XBEGIN
+CPL       : 3
+CATEGORY  : COND_BR
+EXTENSION : RTM
+COMMENT   : Not always a branch. If aborts, then branches & eax is written
+
+PATTERN   : 0xC7 MOD[0b11] MOD=3 REG[0b111] RM[0b000] BRDISPz()
+OPERANDS  : RELBR:r:z REG0=rIP():rw:SUPP REG1=XED_REG_EAX:cw:SUPP
+}
+
+{
+ICLASS    : XEND
+CPL       : 3
+CATEGORY  : COND_BR
+EXTENSION : RTM
+COMMENT   : Transaction end. may branch
+PATTERN   : 0x0F 0x01 MOD[0b11] MOD=3 REG[0b010] RM[0b101]  no_refining_prefix
+OPERANDS  :
+}
+
+{
+ICLASS    : XABORT
+CPL       : 3
+CATEGORY  : UNCOND_BR
+EXTENSION : RTM
+COMMENT   : Transaction abort. Branches. NOP outside of transaction; Thus eax is rcw.
+PATTERN   : 0xC6 MOD[0b11] MOD=3 REG[0b111] RM[0b000]  UIMM8()
+OPERANDS  : REG0=XED_REG_EAX:rcw:SUPP IMM0:r:b
+}
+
+
+{
+ICLASS    : XTEST
+CPL       : 3
+CATEGORY  : LOGICAL
+EXTENSION : RTM
+COMMENT   : test if in RTM transaction mode
+FLAGS     : MUST [ of-0 sf-0 zf-mod af-0 pf-0 cf-0 ]
+PATTERN   : 0x0F 0x01 MOD[0b11] MOD=3 REG[0b010] RM[0b110]  no_refining_prefix
+OPERANDS  :
+}
+
+
+###FILE: ./datafiles/avx/avx-fma-isa.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+AVX_INSTRUCTIONS()::
+
+# Issues: encoder is at a loss for vmaddps xmm0,xmm0,xmm0,xmm0.
+# Encoder must enforce equality between two parameters. Never had to do this before.
+#   Extra check?
+# Decoder must rip off suffixes _DDMR, _DDRM, _DRMD  in disassembly (eventually)
+#############################################################################################
+# Operand orders:
+#             A  =  B   *  C     +  D
+#Type 1)   reg0  reg0  mem/reg1  reg2          DDMR  312 or 132
+#Type 2)   reg0  reg0  reg1      mem/reg2      DDRM  123 or 213
+#Type 3)   reg0  reg1  mem/reg2  reg0          DRMD  321 or 231
+
+# dst is in MODRM.REG
+# regsrc is in VEX.vvvv
+# memop is in MODRM.RM
+############################################################################################
+
+
+
+
+
+
+
+
+
+
+
+
+##########################################################
+
+
+
+
+
+
+
+
+
+
+
+
+##################################################################
+
+
+
+
+
+
+
+
+
+
+
+
+
+##################################################################
+{
+ICLASS    : VFMADD132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x98 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x98 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x98 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x98 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMADD132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x98 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x98 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x98 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x98 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMADD132SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x99 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0x99 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VFMADD132SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x99  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0x99  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+{
+ICLASS    : VFMADD213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA8 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xA8 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xA8 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xA8 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMADD213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA8 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xA8 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xA8 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xA8 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMADD213SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xA9  V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64     MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xA9  V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFMADD213SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xA9  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32     MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xA9  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VFMADD231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB8 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xB8 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xB8 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xB8 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+{
+ICLASS    : VFMADD231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB8 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xB8 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xB8 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xB8 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+{
+ICLASS    : VFMADD231SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xB9 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xB9 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFMADD231SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xB9 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xB9 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+
+###################################################
+{
+ICLASS    : VFMADDSUB132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x96 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x96 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x96 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x96 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMADDSUB213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA6 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xA6 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xA6 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xA6 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMADDSUB231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB6 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xB6 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xB6 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xB6 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+
+{
+ICLASS    : VFMADDSUB132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x96 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x96 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x96 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x96 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMADDSUB213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA6 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xA6 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xA6 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xA6 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMADDSUB231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB6 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xB6 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xB6 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xB6 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+###################################################
+
+{
+ICLASS    : VFMSUBADD132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x97 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x97 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x97 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x97 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMSUBADD213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA7 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xA7 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xA7 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xA7 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMSUBADD231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB7 VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xB7 VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xB7 VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xB7 VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+
+{
+ICLASS    : VFMSUBADD132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x97 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x97 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x97 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x97 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMSUBADD213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xA7 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xA7 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xA7 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xA7 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMSUBADD231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xB7 VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xB7 VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xB7 VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xB7 VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+
+
+###################################################
+
+{
+ICLASS    : VFMSUB132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9A VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x9A VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x9A VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x9A VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMSUB132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9A VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x9A VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x9A VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x9A VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMSUB132SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9B V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0x9B V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VFMSUB132SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9B  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0x9B  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+{
+ICLASS    : VFMSUB213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAA VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xAA VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xAA VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xAA VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFMSUB213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAA VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xAA VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xAA VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xAA VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFMSUB213SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAB  V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64     MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xAB  V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFMSUB213SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAB  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32     MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xAB  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VFMSUB231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBA VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xBA VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xBA VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xBA VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+{
+ICLASS    : VFMSUB231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBA VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xBA VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xBA VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xBA VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+{
+ICLASS    : VFMSUB231SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBB V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xBB V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFMSUB231SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBB V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xBB V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+###################################################
+
+
+{
+ICLASS    : VFNMADD132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9C VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x9C VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x9C VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x9C VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFNMADD132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9C VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x9C VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x9C VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x9C VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFNMADD132SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9D V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0x9D V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VFNMADD132SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9D  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0x9D  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+{
+ICLASS    : VFNMADD213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAC VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xAC VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xAC VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xAC VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFNMADD213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAC VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xAC VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xAC VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xAC VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFNMADD213SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAD  V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64     MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xAD  V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFNMADD213SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAD  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32     MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xAD  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VFNMADD231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBC VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xBC VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xBC VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xBC VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+{
+ICLASS    : VFNMADD231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBC VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xBC VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xBC VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xBC VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+{
+ICLASS    : VFNMADD231SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBD V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xBD V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFNMADD231SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBD V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xBD V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+###################################################
+
+
+{
+ICLASS    : VFNMSUB132PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9E VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0x9E VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0x9E VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0x9E VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFNMSUB132PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0x9E VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0x9E VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0x9E VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0x9E VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFNMSUB132SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9F V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0x9F V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+}
+{
+ICLASS    : VFNMSUB132SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0x9F  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0x9F  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+{
+ICLASS    : VFNMSUB213PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAE VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64    MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xAE VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xAE VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64    MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xAE VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+}
+{
+ICLASS    : VFNMSUB213PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xAE VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xAE VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+
+# R/M 256
+PATTERN : VV1 0xAE VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xAE VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+}
+{
+ICLASS    : VFNMSUB213SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAF  V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64     MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xAF  V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFNMSUB213SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xAF  V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32     MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xAF  V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+}
+
+{
+ICLASS    : VFNMSUB231PD
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBE VL128 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 MEM0:r:dq:f64
+# R/R 128
+PATTERN : VV1 0xBE VL128 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:dq:f64 REG2=XMM_B():r:dq:f64
+
+
+# R/M 256
+PATTERN : VV1 0xBE VL256 V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 MEM0:r:qq:f64
+# R/R 256
+PATTERN : VV1 0xBE VL256 V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f64 REG1=YMM_N():r:qq:f64 REG2=YMM_B():r:qq:f64
+
+}
+{
+ICLASS    : VFNMSUB231PS
+EXCEPTIONS: avx-type-2
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR
+# R/M 128
+PATTERN : VV1 0xBE VL128 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 MEM0:r:dq:f32
+# R/R 128
+PATTERN : VV1 0xBE VL128 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:dq:f32 REG2=XMM_B():r:dq:f32
+
+# R/M 256
+PATTERN : VV1 0xBE VL256 V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 MEM0:r:qq:f32
+# R/R 256
+PATTERN : VV1 0xBE VL256 V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=YMM_R():rw:qq:f32 REG1=YMM_N():r:qq:f32 REG2=YMM_B():r:qq:f32
+
+}
+{
+ICLASS    : VFNMSUB231SD
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBF V66 V0F38 W1 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 MEM0:r:q:f64
+# R/R 128
+PATTERN : VV1 0xBF V66 V0F38 W1 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f64 REG1=XMM_N():r:q:f64 REG2=XMM_B():r:q:f64
+
+}
+{
+ICLASS    : VFNMSUB231SS
+EXCEPTIONS: avx-type-3
+CPL       : 3
+CATEGORY  : VFMA
+EXTENSION : FMA
+ATTRIBUTES: MXCSR simd_scalar
+# R/M 128
+PATTERN : VV1 0xBF V66 V0F38 W0 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 MEM0:r:d:f32
+# R/R 128
+PATTERN : VV1 0xBF V66 V0F38 W0 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
+OPERANDS  : REG0=XMM_R():rw:dq:f32 REG1=XMM_N():r:d:f32 REG2=XMM_B():r:d:f32
+
+}
+
+###################################################
+
+
+
+
+
+
+###FILE: ./datafiles/bdw/adox-adcx.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : ADCX
+CPL       : 3
+CATEGORY  : ADOX_ADCX
+EXTENSION : ADOX_ADCX
+ISA_SET   : ADOX_ADCX
+
+FLAGS     : MUST [ cf-tst cf-mod ]
+
+# reg:rw rm:r
+# 32b
+PATTERN   : 0x0F 0x38 0xF6  MOD[0b11] MOD=3 REG[rrr] RM[nnn] osz_refining_prefix W0  IMMUNE66()
+OPERANDS  : REG0=GPR32_R():rw:d REG1=GPR32_B():r:d
+PATTERN   : 0x0F 0x38 0xF6   MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() osz_refining_prefix W0  IMMUNE66()
+OPERANDS  : REG0=GPR32_R():rw:d MEM0:r:d
+
+# 64b
+PATTERN   : 0x0F 0x38 0xF6  MOD[0b11] MOD=3 REG[rrr] RM[nnn] osz_refining_prefix  W1 IMMUNE66()
+OPERANDS  : REG0=GPR64_R():rw:q  REG1=GPR64_B():r:q
+PATTERN   : 0x0F 0x38 0xF6  MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() osz_refining_prefix  W1  IMMUNE66()
+OPERANDS  : REG0=GPR64_R():rw:q  MEM0:r:q
+}
+
+
+
+{
+ICLASS    : ADOX
+CPL       : 3
+CATEGORY  : ADOX_ADCX
+EXTENSION : ADOX_ADCX
+ISA_SET   : ADOX_ADCX
+
+FLAGS     : MUST [ of-tst of-mod ]
+
+# reg:rw rm:r
+# 32b
+PATTERN   : 0x0F 0x38 0xF6  MOD[0b11] MOD=3 REG[rrr] RM[nnn] refining_f3  W0 IMMUNE66()
+OPERANDS  : REG0=GPR32_R():rw:d  REG1=GPR32_B():r:d
+PATTERN   : 0x0F 0x38 0xF6  MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() refining_f3 W0 IMMUNE66()
+OPERANDS  : REG0=GPR32_R():rw:d MEM0:r:d
+
+# 64b
+PATTERN   : 0x0F 0x38 0xF6 MOD[0b11] MOD=3 REG[rrr] RM[nnn] refining_f3 W1 IMMUNE66()
+OPERANDS  : REG0=GPR64_R():rw:q  REG1=GPR64_B():r:q
+PATTERN   : 0x0F 0x38 0xF6 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM() refining_f3 W1   IMMUNE66()
+OPERANDS  : REG0=GPR64_R():rw:q  MEM0:r:q
+}
+
+
+
+###FILE: ./datafiles/bdw/rdseed.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+INSTRUCTIONS()::
+
+{
+ICLASS    : RDSEED
+CPL       : 3
+CATEGORY  : RDSEED
+EXTENSION : RDSEED
+ISA_SET   : RDSEED
+FLAGS     : MUST [ cf-mod zf-0 of-0 af-0 pf-0 sf-0 ]
+PATTERN   : 0x0F 0xC7  MOD[0b11] MOD=3 REG[0b111] RM[nnn] not_refining
+OPERANDS  : REG0=GPRv_B():w
+}
+
+
+
+###FILE: ./datafiles/bdw/smap.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+INSTRUCTIONS()::
+
+{
+ICLASS    : CLAC
+CPL       : 0
+CATEGORY  : SMAP
+EXTENSION : SMAP
+FLAGS     : MUST [ ac-0 ]
+# 0F 01 CA = 1100_1010 = 11_001_010
+PATTERN   : 0x0F 0x01  MOD[0b11] MOD=3 REG[0b001] RM[0b010] no_refining_prefix
+OPERANDS  :
+}
+
+{
+ICLASS    : STAC
+CPL       : 0
+CATEGORY  : SMAP
+EXTENSION : SMAP
+FLAGS     : MUST [ ac-1 ]
+# 0F 01 CB = 1100_1011 = 11_001_011
+PATTERN   : 0x0F 0x01  MOD[0b11] MOD=3 REG[0b001] RM[0b011] no_refining_prefix
+OPERANDS  :
+}
+
+
+
+###FILE: ./datafiles/sgx/sgx-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+INSTRUCTIONS()::
+
+# Both read EAX
+# Both may read or write or r/w  RBX, RCX, RDX
+# ENCLU 0f 01 D7
+# D7 =  1101 0111
+
+# ENCLS 0f 01 CF
+# CF = 1100_1111
+
+
+
+{
+ICLASS: ENCLU
+CPL: 3
+CATEGORY:  SGX
+EXTENSION: SGX
+ISA_SET:   SGX
+COMMENT:   May set flags
+PATTERN: 0x0F 0x01 MOD[0b11] MOD=3 REG[0b010] RM[0b111] no_refining_prefix
+OPERANDS: REG0=XED_REG_EAX:r:SUPP    \
+          REG1=XED_REG_RBX:crw:SUPP  \
+          REG2=XED_REG_RCX:crw:SUPP  \
+          REG3=XED_REG_RDX:crw:SUPP
+}
+
+{
+
+ICLASS: ENCLS
+CPL: 0
+CATEGORY:  SGX
+EXTENSION: SGX
+ISA_SET:   SGX
+COMMENT:   May set flags
+PATTERN: 0x0F 0x01 MOD[0b11] MOD=3 REG[0b001] RM[0b111] no_refining_prefix
+OPERANDS: REG0=XED_REG_EAX:r:SUPP    \
+          REG1=XED_REG_RBX:crw:SUPP  \
+          REG2=XED_REG_RCX:crw:SUPP  \
+          REG3=XED_REG_RDX:crw:SUPP
+
+}
+
+
+###FILE: ./datafiles/clflushopt/clflushopt.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+INSTRUCTIONS()::
+
+{
+ICLASS: CLFLUSHOPT
+CPL: 3
+CATEGORY:  CLFLUSHOPT
+EXTENSION: CLFLUSHOPT
+ISA_SET:   CLFLUSHOPT
+ATTRIBUTES: PREFETCH  # check TSX-friendlyness
+PATTERN   : 0x0F 0xAE  MOD[mm] MOD!=3 REG[0b111] RM[nnn]  osz_refining_prefix REFINING66() MODRM()
+OPERANDS  : MEM0:r:mprefetch
+}
+
+
+
+
+###FILE: ./datafiles/pku/pku-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+
+INSTRUCTIONS()::
+
+{
+ICLASS:      RDPKRU
+CPL:         3
+CATEGORY:    PKU
+EXTENSION:   PKU
+ISA_SET:     PKU
+ATTRIBUTES:
+PATTERN:    0x0F 0x01 MOD[0b11] MOD=3 REG[0b101] RM[0b110]  no_refining_prefix
+OPERANDS:    REG0=XED_REG_EDX:w:SUPP REG1=XED_REG_EAX:w:SUPP REG2=XED_REG_ECX:r:SUPP
+}
+
+
+{
+ICLASS:      WRPKRU
+CPL:         3
+CATEGORY:    PKU
+EXTENSION:   PKU
+ISA_SET:     PKU
+ATTRIBUTES:
+PATTERN:    0x0F 0x01 MOD[0b11] MOD=3 REG[0b101] RM[0b111]  no_refining_prefix
+OPERANDS:    REG0=XED_REG_EDX:r:SUPP REG1=XED_REG_EAX:r:SUPP REG2=XED_REG_ECX:r:SUPP
+}
+
+
+
+###FILE: ./datafiles/clwb/clwb.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+INSTRUCTIONS()::
+
+{
+ICLASS: CLWB
+CPL: 3
+CATEGORY:  CLWB
+EXTENSION: CLWB
+ISA_SET:   CLWB
+ATTRIBUTES: PREFETCH  # check TSX-friendlyness
+PATTERN   : 0x0F 0xAE  MOD[mm] MOD!=3 REG[0b110] RM[nnn]  osz_refining_prefix REFINING66() MODRM()
+OPERANDS  : MEM0:r:mprefetch
+}
+
+
+
+
+###FILE: ./datafiles/knl/knl-fixup.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+INSTRUCTIONS()::
+UDELETE     : PREFETCH_RESERVED_0F0Dr2
+
+
+###FILE: ./datafiles/knl/knl-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VEXP2PD (VEXP2PD-512-1)
+{
+ICLASS:      VEXP2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VEXP2PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VEXP2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VEXP2PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VEXP2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VEXP2PD_ZMMf64_MASKmskw_MEMf64_AVX512ER
+}
+
+
+# EMITTING VEXP2PS (VEXP2PS-512-1)
+{
+ICLASS:      VEXP2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VEXP2PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VEXP2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VEXP2PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VEXP2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VEXP2PS_ZMMf32_MASKmskw_MEMf32_AVX512ER
+}
+
+
+# EMITTING VGATHERPF0DPD (VGATHERPF0DPD-512-1)
+{
+ICLASS:      VGATHERPF0DPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b001] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF0DPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF0DPS (VGATHERPF0DPS-512-1)
+{
+ICLASS:      VGATHERPF0DPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b001] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF0DPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF0QPD (VGATHERPF0QPD-512-1)
+{
+ICLASS:      VGATHERPF0QPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b001] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF0QPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF0QPS (VGATHERPF0QPS-512-1)
+{
+ICLASS:      VGATHERPF0QPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b001] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF0QPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF1DPD (VGATHERPF1DPD-512-1)
+{
+ICLASS:      VGATHERPF1DPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF1DPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF1DPS (VGATHERPF1DPS-512-1)
+{
+ICLASS:      VGATHERPF1DPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF1DPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF1QPD (VGATHERPF1QPD-512-1)
+{
+ICLASS:      VGATHERPF1QPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF1QPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VGATHERPF1QPS (VGATHERPF1QPS-512-1)
+{
+ICLASS:      VGATHERPF1QPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES GATHER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VGATHERPF1QPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VRCP28PD (VRCP28PD-512-1)
+{
+ICLASS:      VRCP28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRCP28PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRCP28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRCP28PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRCP28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRCP28PD_ZMMf64_MASKmskw_MEMf64_AVX512ER
+}
+
+
+# EMITTING VRCP28PS (VRCP28PS-512-1)
+{
+ICLASS:      VRCP28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRCP28PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRCP28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRCP28PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRCP28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRCP28PS_ZMMf32_MASKmskw_MEMf32_AVX512ER
+}
+
+
+# EMITTING VRCP28SD (VRCP28SD-128-1)
+{
+ICLASS:      VRCP28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRCP28SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRCP28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRCP28SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRCP28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_SCALAR
+PATTERN:    EVV 0xCB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VRCP28SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512ER
+}
+
+
+# EMITTING VRCP28SS (VRCP28SS-128-1)
+{
+ICLASS:      VRCP28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRCP28SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRCP28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRCP28SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRCP28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_SCALAR
+PATTERN:    EVV 0xCB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VRCP28SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512ER
+}
+
+
+# EMITTING VRSQRT28PD (VRSQRT28PD-512-1)
+{
+ICLASS:      VRSQRT28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRSQRT28PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRSQRT28PD_ZMMf64_MASKmskw_ZMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRSQRT28PD_ZMMf64_MASKmskw_MEMf64_AVX512ER
+}
+
+
+# EMITTING VRSQRT28PS (VRSQRT28PS-512-1)
+{
+ICLASS:      VRSQRT28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRSQRT28PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xCC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRSQRT28PS_ZMMf32_MASKmskw_ZMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRSQRT28PS_ZMMf32_MASKmskw_MEMf32_AVX512ER
+}
+
+
+# EMITTING VRSQRT28SD (VRSQRT28SD-128-1)
+{
+ICLASS:      VRSQRT28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRSQRT28SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRSQRT28SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_SCALAR
+PATTERN:    EVV 0xCD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VRSQRT28SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512ER
+}
+
+
+# EMITTING VRSQRT28SS (VRSQRT28SS-128-1)
+{
+ICLASS:      VRSQRT28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRSQRT28SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MASKOP_EVEX
+PATTERN:    EVV 0xCD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRSQRT28SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512ER
+}
+
+{
+ICLASS:      VRSQRT28SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512ER_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_SCALAR
+PATTERN:    EVV 0xCD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VRSQRT28SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512ER
+}
+
+
+# EMITTING VSCATTERPF0DPD (VSCATTERPF0DPD-512-1)
+{
+ICLASS:      VSCATTERPF0DPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b101] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF0DPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF0DPS (VSCATTERPF0DPS-512-1)
+{
+ICLASS:      VSCATTERPF0DPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b101] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF0DPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF0QPD (VSCATTERPF0QPD-512-1)
+{
+ICLASS:      VSCATTERPF0QPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b101] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF0QPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF0QPS (VSCATTERPF0QPS-512-1)
+{
+ICLASS:      VSCATTERPF0QPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b101] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF0QPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF1DPD (VSCATTERPF1DPD-512-1)
+{
+ICLASS:      VSCATTERPF1DPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF1DPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF1DPS (VSCATTERPF1DPS-512-1)
+{
+ICLASS:      VSCATTERPF1DPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED DWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC6 V66 V0F38 MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF1DPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF1QPD (VSCATTERPF1QPD-512-1)
+{
+ICLASS:      VSCATTERPF1QPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0   VL512  W1 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f64 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF1QPD_MEMf64_MASKmskw_AVX512PF_VL512
+}
+
+
+# EMITTING VSCATTERPF1QPS (VSCATTERPF1QPS-512-1)
+{
+ICLASS:      VSCATTERPF1QPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512PF_512
+EXCEPTIONS:     AVX512-E12NP
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION SPECIAL_AGEN_REQUIRED QWORD_INDICES SCATTER PREFETCH MASKOP_EVEX DISP8_GSCAT
+PATTERN:    EVV 0xC7 V66 V0F38 MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0   VL512  W0 RM=4 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:r:b:f32 REG0=MASKNOT0():rw:mskw
+IFORM:       VSCATTERPF1QPS_MEMf32_MASKmskw_AVX512PF_VL512
+}
+
+
+INSTRUCTIONS()::
+# EMITTING PREFETCHWT1 (PREFETCHWT1-N/A-1)
+{
+ICLASS:      PREFETCHWT1
+CPL:         3
+CATEGORY:    PREFETCHWT1
+EXTENSION:   PREFETCHWT1
+ISA_SET:     PREFETCHWT1
+REAL_OPCODE: Y
+ATTRIBUTES:  PREFETCH
+PATTERN:     0x0F 0x0D MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()
+OPERANDS:    MEM0:r:b:u8
+IFORM:       PREFETCHWT1_MEMu8
+}
+
+
+
+
+###FILE: ./datafiles/4fmaps-512/4fmaps-512-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING V4FMADDPS (V4FMADDPS-512-1)
+{
+ICLASS:      V4FMADDPS
+CPL:         3
+CATEGORY:    AVX512_4FMAPS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4FMAPS_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MULTISOURCE4 DISP8_TUPLE1_4X MXCSR MASKOP_EVEX
+PATTERN:    EVV 0x9A VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32:MULTISOURCE4 MEM0:r:dq:f32
+IFORM:       V4FMADDPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING V4FMADDSS (V4FMADDSS-128-1)
+{
+ICLASS:      V4FMADDSS
+CPL:         3
+CATEGORY:    AVX512_4FMAPS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4FMAPS_SCALAR
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE1_4X MXCSR MULTISOURCE4 MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0x9B VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32:MULTISOURCE4 MEM0:r:dq:f32
+IFORM:       V4FMADDSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING V4FNMADDPS (V4FNMADDPS-512-1)
+{
+ICLASS:      V4FNMADDPS
+CPL:         3
+CATEGORY:    AVX512_4FMAPS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4FMAPS_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MULTISOURCE4 DISP8_TUPLE1_4X MXCSR MASKOP_EVEX
+PATTERN:    EVV 0xAA VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32:MULTISOURCE4 MEM0:r:dq:f32
+IFORM:       V4FNMADDPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING V4FNMADDSS (V4FNMADDSS-128-1)
+{
+ICLASS:      V4FNMADDSS
+CPL:         3
+CATEGORY:    AVX512_4FMAPS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4FMAPS_SCALAR
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE1_4X MXCSR MULTISOURCE4 MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0xAB VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32:MULTISOURCE4 MEM0:r:dq:f32
+IFORM:       V4FNMADDSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/4vnniw-512/4vnniw-512-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VP4DPWSSD (VP4DPWSSD-512-1)
+{
+ICLASS:      VP4DPWSSD
+CPL:         3
+CATEGORY:    AVX512_4VNNIW
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4VNNIW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MULTISOURCE4 DISP8_TUPLE1_4X MASKOP_EVEX
+PATTERN:    EVV 0x52 VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() BCRC=0  VL512  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16:MULTISOURCE4 MEM0:r:dq:u32
+IFORM:       VP4DPWSSD_ZMMi32_MASKmskw_ZMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VP4DPWSSDS (VP4DPWSSDS-512-1)
+{
+ICLASS:      VP4DPWSSDS
+CPL:         3
+CATEGORY:    AVX512_4VNNIW
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_4VNNIW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MULTISOURCE4 DISP8_TUPLE1_4X MASKOP_EVEX
+PATTERN:    EVV 0x53 VF2 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() BCRC=0  VL512  W0    ESIZE_32_BITS() NELEM_TUPLE1_4X()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16:MULTISOURCE4 MEM0:r:dq:u32
+IFORM:       VP4DPWSSDS_ZMMi32_MASKmskw_ZMMi16_MEMu32_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/vpopcntdq-512/vpopcntdq-512-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPOPCNTD (VPOPCNTD-512-1)
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VPOPCNTD_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPOPCNTD_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPOPCNTQ (VPOPCNTQ-512-1)
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VPOPCNTQ_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPOPCNTQ_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/avx512f/avx512-foundation-isa.xed.txt
+
+#BEGIN_LEGAL
+#INTEL CONFIDENTIAL
+#
+#Copyright (c) 2017, Intel Corporation. All rights reserved.
+#
+#The source code contained or described herein and all documents
+#related to the source code ("Material") are owned by Intel Corporation
+#or its suppliers or licensors. Title to the Material remains with
+#Intel Corporation or its suppliers and licensors. The Material
+#contains trade secrets and proprietary and confidential information of
+#Intel or its suppliers and licensors. The Material is protected by
+#worldwide copyright and trade secret laws and treaty provisions. No
+#part of the Material may be used, copied, reproduced, modified,
+#published, uploaded, posted, transmitted, distributed, or disclosed in
+#any way without Intel's prior express written permission.
+#
+#No license under any patent, copyright, trade secret or other
+#intellectual property right is granted to or conferred upon you by
+#disclosure or delivery of the Materials, either expressly, by
+#implication, inducement, estoppel or otherwise. Any license under such
+#intellectual property rights must be express and approved by Intel in
+#writing.
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VADDPD (VADDPD-512-1)
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VADDPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VADDPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VADDPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VADDPS (VADDPS-512-1)
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VADDPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VADDPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VADDPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VADDSD (VADDSD-128-1)
+{
+ICLASS:      VADDSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x58 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VADDSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VADDSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x58 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VADDSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VADDSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x58 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VADDSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VADDSS (VADDSS-128-1)
+{
+ICLASS:      VADDSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x58 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VADDSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VADDSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x58 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VADDSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VADDSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x58 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VADDSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VALIGND (VALIGND-512-1)
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VALIGND_ZMMu32_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGND_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VALIGNQ (VALIGNQ-512-1)
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VALIGNQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGNQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VBLENDMPD (VBLENDMPD-512-1)
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VBLENDMPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VBLENDMPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VBLENDMPS (VBLENDMPS-512-1)
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VBLENDMPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VBLENDMPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF32X4 (VBROADCASTF32X4-512-1)
+{
+ICLASS:      VBROADCASTF32X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x1A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32 EMX_BROADCAST_4TO16_32
+IFORM:       VBROADCASTF32X4_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF64X4 (VBROADCASTF64X4-512-1)
+{
+ICLASS:      VBROADCASTF64X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x1B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f64 EMX_BROADCAST_4TO8_64
+IFORM:       VBROADCASTF64X4_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X4 (VBROADCASTI32X4-512-1)
+{
+ICLASS:      VBROADCASTI32X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x5A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u32 EMX_BROADCAST_4TO16_32
+IFORM:       VBROADCASTI32X4_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI64X4 (VBROADCASTI64X4-512-1)
+{
+ICLASS:      VBROADCASTI64X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x5B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u64 EMX_BROADCAST_4TO8_64
+IFORM:       VBROADCASTI64X4_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VBROADCASTSD (VBROADCASTSD-512-1)
+{
+ICLASS:      VBROADCASTSD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x19 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f64 EMX_BROADCAST_1TO8_64
+IFORM:       VBROADCASTSD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTSD (VBROADCASTSD-512-2)
+{
+ICLASS:      VBROADCASTSD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 EMX_BROADCAST_1TO8_64
+IFORM:       VBROADCASTSD_ZMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-512-1)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x18 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:f32 EMX_BROADCAST_1TO16_32
+IFORM:       VBROADCASTSS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-512-2)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 EMX_BROADCAST_1TO16_32
+IFORM:       VBROADCASTSS_ZMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VCMPPD (VCMPPD-512-1)
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw:TXT=SAESTR REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VCMPPS (VCMPPS-512-1)
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw:TXT=SAESTR REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCMPSD (VCMPSD-128-1)
+{
+ICLASS:      VCMPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xC2 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VCMPSD_MASKmskw_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xC2 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw:TXT=SAESTR REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VCMPSD_MASKmskw_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xC2 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VCMPSD_MASKmskw_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VCMPSS (VCMPSS-128-1)
+{
+ICLASS:      VCMPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xC2 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VCMPSS_MASKmskw_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xC2 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw:TXT=SAESTR REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VCMPSS_MASKmskw_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xC2 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VCMPSS_MASKmskw_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCOMISD (VCOMISD-128-1)
+{
+ICLASS:      VCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f64 REG1=XMM_B3():r:dq:f64
+IFORM:       VCOMISD_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2F V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f64:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VCOMISD_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VCOMISD_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VCOMISS (VCOMISS-128-1)
+{
+ICLASS:      VCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2F VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f32 REG1=XMM_B3():r:dq:f32
+IFORM:       VCOMISS_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2F VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f32:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VCOMISS_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2F VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VCOMISS_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-512-1)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:f64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf64
+IFORM:       VCOMPRESSPD_MEMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-512-2)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf64
+IFORM:       VCOMPRESSPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-512-1)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:f32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32
+IFORM:       VCOMPRESSPS_MEMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-512-2)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32
+IFORM:       VCOMPRESSPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PD (VCVTDQ2PD-512-1)
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i32
+IFORM:       VCVTDQ2PD_ZMMf64_MASKmskw_YMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PD_ZMMf64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PS (VCVTDQ2PS-512-1)
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi32
+IFORM:       VCVTDQ2PS_ZMMf32_MASKmskw_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi32
+IFORM:       VCVTDQ2PS_ZMMf32_MASKmskw_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PS_ZMMf32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTPD2DQ (VCVTPD2DQ-512-1)
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2DQ_YMMi32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2DQ_YMMi32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2DQ_YMMi32_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VCVTPD2PS (VCVTPD2PS-512-1)
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2PS_YMMf32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2PS_YMMf32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2PS_YMMf32_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VCVTPD2UDQ (VCVTPD2UDQ-512-1)
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2UDQ_YMMu32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2UDQ_YMMu32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UDQ_YMMu32_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VCVTPH2PS (VCVTPH2PS-512-1)
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x13 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f16
+IFORM:       VCVTPH2PS_ZMMf32_MASKmskw_YMMf16_AVX512
+}
+
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x13 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f16
+IFORM:       VCVTPH2PS_ZMMf32_MASKmskw_YMMf16_AVX512
+}
+
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x13 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f16
+IFORM:       VCVTPH2PS_ZMMf32_MASKmskw_MEMf16_AVX512
+}
+
+
+# EMITTING VCVTPS2DQ (VCVTPS2DQ-512-1)
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTPS2DQ_ZMMi32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTPS2DQ_ZMMi32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2DQ_ZMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2PD (VCVTPS2PD-512-1)
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2PD_ZMMf64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2PD_ZMMf64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2PD_ZMMf64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-512-1)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x1D V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:f16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VCVTPS2PH_YMMf16_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x1D V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:f16:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VCVTPS2PH_YMMf16_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-512-2)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x1D V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:f16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VCVTPS2PH_MEMf16_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2UDQ (VCVTPS2UDQ-512-1)
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTPS2UDQ_ZMMu32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTPS2UDQ_ZMMu32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UDQ_ZMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTSD2SI (VCVTSD2SI-128-1)
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SI_GPR32i32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32:TXT=ROUNDC REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SI_GPR32i32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x2D VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+IFORM:       VCVTSD2SI_GPR32i32_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTSD2SI (VCVTSD2SI-128-2)
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SI_GPR64i64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64:TXT=ROUNDC REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SI_GPR64i64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x2D VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR64_R():w:q:i64 MEM0:r:q:f64
+IFORM:       VCVTSD2SI_GPR64i64_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTSD2SS (VCVTSD2SS-128-1)
+{
+ICLASS:      VCVTSD2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SS_XMMf32_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5A VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2SS_XMMf32_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VCVTSD2SS_XMMf32_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTSD2USI (VCVTSD2USI-128-1)
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2USI_GPR32u32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32:TXT=ROUNDC REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2USI_GPR32u32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x79 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR32_R():w:d:u32 MEM0:r:q:f64
+IFORM:       VCVTSD2USI_GPR32u32_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTSD2USI (VCVTSD2USI-128-2)
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2USI_GPR64u64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64:TXT=ROUNDC REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTSD2USI_GPR64u64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x79 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR64_R():w:q:u64 MEM0:r:q:f64
+IFORM:       VCVTSD2USI_GPR64u64_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTSI2SD (VCVTSI2SD-128-1)
+{
+ICLASS:      VCVTSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10NF
+REAL_OPCODE: Y
+ATTRIBUTES:  SIMD_SCALAR
+PATTERN:    EVV 0x2A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 REG2=GPR32_B():r:d:i32
+IFORM:       VCVTSI2SD_XMMf64_XMMf64_GPR32i32_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10NF
+REAL_OPCODE: Y
+ATTRIBUTES:  SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x2A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 MEM0:r:d:i32
+IFORM:       VCVTSI2SD_XMMf64_XMMf64_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTSI2SD (VCVTSI2SD-128-2)
+{
+ICLASS:      VCVTSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 REG2=GPR64_B():r:q:i64
+IFORM:       VCVTSI2SD_XMMf64_XMMf64_GPR64i64_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=XMM_N3():r:dq:f64 REG2=GPR64_B():r:q:i64
+IFORM:       VCVTSI2SD_XMMf64_XMMf64_GPR64i64_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x2A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 MEM0:r:q:i64
+IFORM:       VCVTSI2SD_XMMf64_XMMf64_MEMi64_AVX512
+}
+
+
+# EMITTING VCVTSI2SS (VCVTSI2SS-128-1)
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=GPR32_B():r:d:i32
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_GPR32i32_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=XMM_N3():r:dq:f32 REG2=GPR32_B():r:d:i32
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_GPR32i32_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x2A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:d:i32
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTSI2SS (VCVTSI2SS-128-2)
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=GPR64_B():r:q:i64
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_GPR64i64_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2A VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=XMM_N3():r:dq:f32 REG2=GPR64_B():r:q:i64
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_GPR64i64_AVX512
+}
+
+{
+ICLASS:      VCVTSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x2A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:q:i64
+IFORM:       VCVTSI2SS_XMMf32_XMMf32_MEMi64_AVX512
+}
+
+
+# EMITTING VCVTSS2SD (VCVTSS2SD-128-1)
+{
+ICLASS:      VCVTSS2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SD_XMMf64_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5A VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SD_XMMf64_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VCVTSS2SD_XMMf64_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTSS2SI (VCVTSS2SI-128-1)
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SI_GPR32i32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32:TXT=ROUNDC REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SI_GPR32i32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+IFORM:       VCVTSS2SI_GPR32i32_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTSS2SI (VCVTSS2SI-128-2)
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SI_GPR64i64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64:TXT=ROUNDC REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2SI_GPR64i64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR64_R():w:q:i64 MEM0:r:d:f32
+IFORM:       VCVTSS2SI_GPR64i64_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTSS2USI (VCVTSS2USI-128-1)
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2USI_GPR32u32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32:TXT=ROUNDC REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2USI_GPR32u32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR32_R():w:d:u32 MEM0:r:d:f32
+IFORM:       VCVTSS2USI_GPR32u32_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTSS2USI (VCVTSS2USI-128-2)
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2USI_GPR64u64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64:TXT=ROUNDC REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTSS2USI_GPR64u64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x79 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR64_R():w:q:u64 MEM0:r:d:f32
+IFORM:       VCVTSS2USI_GPR64u64_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPD2DQ (VCVTTPD2DQ-512-1)
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2DQ_YMMi32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2DQ_YMMi32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2DQ_YMMi32_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VCVTTPD2UDQ (VCVTTPD2UDQ-512-1)
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2UDQ_YMMu32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2UDQ_YMMu32_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UDQ_YMMu32_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VCVTTPS2DQ (VCVTTPS2DQ-512-1)
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTTPS2DQ_ZMMi32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTTPS2DQ_ZMMi32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2DQ_ZMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UDQ (VCVTTPS2UDQ-512-1)
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTTPS2UDQ_ZMMu32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VCVTTPS2UDQ_ZMMu32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UDQ_ZMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTSD2SI (VCVTTSD2SI-128-1)
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2SI_GPR32i32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2SI_GPR32i32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x2C VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR32_R():w:d:i32 MEM0:r:q:f64
+IFORM:       VCVTTSD2SI_GPR32i32_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTSD2SI (VCVTTSD2SI-128-2)
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2SI_GPR64i64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2SI_GPR64i64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x2C VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR64_R():w:q:i64 MEM0:r:q:f64
+IFORM:       VCVTTSD2SI_GPR64i64_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTSD2USI (VCVTTSD2USI-128-1)
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2USI_GPR32u32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2USI_GPR32u32_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x78 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR32_R():w:d:u32 MEM0:r:q:f64
+IFORM:       VCVTTSD2USI_GPR32u32_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTSD2USI (VCVTTSD2USI-128-2)
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64 REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2USI_GPR64u64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VCVTTSD2USI_GPR64u64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTSD2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_WRITER_LDOP_Q
+PATTERN:    EVV 0x78 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_LDOP_Q()
+OPERANDS:    REG0=GPR64_R():w:q:u64 MEM0:r:q:f64
+IFORM:       VCVTTSD2USI_GPR64u64_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTSS2SI (VCVTTSS2SI-128-1)
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2SI_GPR32i32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:i32:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2SI_GPR32i32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR32_R():w:d:i32 MEM0:r:d:f32
+IFORM:       VCVTTSS2SI_GPR32i32_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTSS2SI (VCVTTSS2SI-128-2)
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2SI_GPR64i64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:i64:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2SI_GPR64i64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2SI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2C VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR64_R():w:q:i64 MEM0:r:d:f32
+IFORM:       VCVTTSS2SI_GPR64i64_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTSS2USI (VCVTTSS2USI-128-1)
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2USI_GPR32u32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_R():w:d:u32:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2USI_GPR32u32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR32_R():w:d:u32 MEM0:r:d:f32
+IFORM:       VCVTTSS2USI_GPR32u32_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTSS2USI (VCVTTSS2USI-128-2)
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64 REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2USI_GPR64u64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_R():w:q:u64:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VCVTTSS2USI_GPR64u64_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTSS2USI
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_LDOP_D MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x78 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_LDOP_D()
+OPERANDS:    REG0=GPR64_R():w:q:u64 MEM0:r:d:f32
+IFORM:       VCVTTSS2USI_GPR64u64_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PD (VCVTUDQ2PD-512-1)
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VCVTUDQ2PD_ZMMf64_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PD_ZMMf64_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PS (VCVTUDQ2PS-512-1)
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VCVTUDQ2PS_ZMMf32_MASKmskw_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VCVTUDQ2PS_ZMMf32_MASKmskw_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PS_ZMMf32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUSI2SD (VCVTUSI2SD-128-1)
+{
+ICLASS:      VCVTUSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10NF
+REAL_OPCODE: Y
+ATTRIBUTES:  SIMD_SCALAR
+PATTERN:    EVV 0x7B VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 REG2=GPR32_B():r:d:u32
+IFORM:       VCVTUSI2SD_XMMf64_XMMf64_GPR32u32_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10NF
+REAL_OPCODE: Y
+ATTRIBUTES:  SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x7B VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 MEM0:r:d:u32
+IFORM:       VCVTUSI2SD_XMMf64_XMMf64_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUSI2SD (VCVTUSI2SD-128-2)
+{
+ICLASS:      VCVTUSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 REG2=GPR64_B():r:q:u64
+IFORM:       VCVTUSI2SD_XMMf64_XMMf64_GPR64u64_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=XMM_N3():r:dq:f64 REG2=GPR64_B():r:q:u64
+IFORM:       VCVTUSI2SD_XMMf64_XMMf64_GPR64u64_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x7B VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 MEM0:r:q:u64
+IFORM:       VCVTUSI2SD_XMMf64_XMMf64_MEMu64_AVX512
+}
+
+
+# EMITTING VCVTUSI2SS (VCVTUSI2SS-128-1)
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=GPR32_B():r:d:u32
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_GPR32u32_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=XMM_N3():r:dq:f32 REG2=GPR32_B():r:d:u32
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_GPR32u32_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x7B VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:d:u32
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUSI2SS (VCVTUSI2SS-128-2)
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=GPR64_B():r:q:u64
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_GPR64u64_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x7B VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1  mode64    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=XMM_N3():r:dq:f32 REG2=GPR64_B():r:q:u64
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_GPR64u64_AVX512
+}
+
+{
+ICLASS:      VCVTUSI2SS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_GPR_READER
+PATTERN:    EVV 0x7B VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  mode64    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:q:u64
+IFORM:       VCVTUSI2SS_XMMf32_XMMf32_MEMu64_AVX512
+}
+
+
+# EMITTING VDIVPD (VDIVPD-512-1)
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VDIVPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VDIVPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VDIVPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VDIVPS (VDIVPS-512-1)
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VDIVPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VDIVPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VDIVPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VDIVSD (VDIVSD-128-1)
+{
+ICLASS:      VDIVSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5E VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VDIVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5E VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VDIVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5E VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VDIVSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VDIVSS (VDIVSS-128-1)
+{
+ICLASS:      VDIVSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5E VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VDIVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5E VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VDIVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5E VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VDIVSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-512-1)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f64
+IFORM:       VEXPANDPD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-512-2)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VEXPANDPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-512-1)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f32
+IFORM:       VEXPANDPS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-512-2)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VEXPANDPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X4 (VEXTRACTF32X4-512-1)
+{
+ICLASS:      VEXTRACTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VEXTRACTF32X4_XMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X4 (VEXTRACTF32X4-512-2)
+{
+ICLASS:      VEXTRACTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x19 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:dq:f32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VEXTRACTF32X4_MEMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X4 (VEXTRACTF64X4-512-1)
+{
+ICLASS:      VEXTRACTF64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1B V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf64 IMM0:r:b
+IFORM:       VEXTRACTF64X4_YMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X4 (VEXTRACTF64X4-512-2)
+{
+ICLASS:      VEXTRACTF64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x1B V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:qq:f64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf64 IMM0:r:b
+IFORM:       VEXTRACTF64X4_MEMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X4 (VEXTRACTI32X4-512-1)
+{
+ICLASS:      VEXTRACTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32 IMM0:r:b
+IFORM:       VEXTRACTI32X4_XMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X4 (VEXTRACTI32X4-512-2)
+{
+ICLASS:      VEXTRACTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x39 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32 IMM0:r:b
+IFORM:       VEXTRACTI32X4_MEMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X4 (VEXTRACTI64X4-512-1)
+{
+ICLASS:      VEXTRACTI64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64 IMM0:r:b
+IFORM:       VEXTRACTI64X4_YMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X4 (VEXTRACTI64X4-512-2)
+{
+ICLASS:      VEXTRACTI64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x3B V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:qq:u64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64 IMM0:r:b
+IFORM:       VEXTRACTI64X4_MEMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTPS (VEXTRACTPS-128-1)
+{
+ICLASS:      VEXTRACTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x17 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR32_B():w:d:f32 REG1=XMM_R3():r:dq:f32 IMM0:r:b
+IFORM:       VEXTRACTPS_GPR32f32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VEXTRACTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE
+PATTERN:    EVV 0x17 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()  ESIZE_32_BITS() NELEM_GPR_WRITER_STORE()
+OPERANDS:    MEM0:w:d:f32 REG0=XMM_R3():r:dq:f32 IMM0:r:b
+IFORM:       VEXTRACTPS_MEMf32_XMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPD (VFIXUPIMMPD-512-1)
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VFIXUPIMMPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VFIXUPIMMPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPS (VFIXUPIMMPS-512-1)
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VFIXUPIMMPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VFIXUPIMMPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMSD (VFIXUPIMMSD-128-1)
+{
+ICLASS:      VFIXUPIMMSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VFIXUPIMMSD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VFIXUPIMMSD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1   UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VFIXUPIMMSD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMSS (VFIXUPIMMSS-128-1)
+{
+ICLASS:      VFIXUPIMMSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VFIXUPIMMSS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VFIXUPIMMSS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x55 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0   UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VFIXUPIMMSS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VFMADD132PD (VFMADD132PD-512-1)
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD132PS (VFMADD132PS-512-1)
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD132SD (VFMADD132SD-128-1)
+{
+ICLASS:      VFMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMADD132SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD132SS (VFMADD132SS-128-1)
+{
+ICLASS:      VFMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x99 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMADD132SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD213PD (VFMADD213PD-512-1)
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD213PS (VFMADD213PS-512-1)
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD213SD (VFMADD213SD-128-1)
+{
+ICLASS:      VFMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMADD213SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD213SS (VFMADD213SS-128-1)
+{
+ICLASS:      VFMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xA9 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMADD213SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD231PD (VFMADD231PD-512-1)
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD231PS (VFMADD231PS-512-1)
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD231SD (VFMADD231SD-128-1)
+{
+ICLASS:      VFMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMADD231SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD231SS (VFMADD231SS-128-1)
+{
+ICLASS:      VFMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xB9 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMADD231SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PD (VFMADDSUB132PD-512-1)
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PS (VFMADDSUB132PS-512-1)
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PD (VFMADDSUB213PD-512-1)
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PS (VFMADDSUB213PS-512-1)
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PD (VFMADDSUB231PD-512-1)
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMADDSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PS (VFMADDSUB231PS-512-1)
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMADDSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB132PD (VFMSUB132PD-512-1)
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB132PS (VFMSUB132PS-512-1)
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB132SD (VFMSUB132SD-128-1)
+{
+ICLASS:      VFMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMSUB132SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB132SS (VFMSUB132SS-128-1)
+{
+ICLASS:      VFMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMSUB132SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB213PD (VFMSUB213PD-512-1)
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB213PS (VFMSUB213PS-512-1)
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB213SD (VFMSUB213SD-128-1)
+{
+ICLASS:      VFMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMSUB213SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB213SS (VFMSUB213SS-128-1)
+{
+ICLASS:      VFMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMSUB213SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB231PD (VFMSUB231PD-512-1)
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB231PS (VFMSUB231PS-512-1)
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB231SD (VFMSUB231SD-128-1)
+{
+ICLASS:      VFMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFMSUB231SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB231SS (VFMSUB231SS-128-1)
+{
+ICLASS:      VFMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBB V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFMSUB231SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PD (VFMSUBADD132PD-512-1)
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PS (VFMSUBADD132PS-512-1)
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PD (VFMSUBADD213PD-512-1)
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PS (VFMSUBADD213PS-512-1)
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PD (VFMSUBADD231PD-512-1)
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFMSUBADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PS (VFMSUBADD231PS-512-1)
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFMSUBADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD132PD (VFNMADD132PD-512-1)
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD132PS (VFNMADD132PS-512-1)
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD132SD (VFNMADD132SD-128-1)
+{
+ICLASS:      VFNMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMADD132SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD132SS (VFNMADD132SS-128-1)
+{
+ICLASS:      VFNMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMADD132SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD213PD (VFNMADD213PD-512-1)
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD213PS (VFNMADD213PS-512-1)
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD213SD (VFNMADD213SD-128-1)
+{
+ICLASS:      VFNMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMADD213SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD213SS (VFNMADD213SS-128-1)
+{
+ICLASS:      VFNMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMADD213SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD231PD (VFNMADD231PD-512-1)
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMADD231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD231PS (VFNMADD231PS-512-1)
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMADD231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD231SD (VFNMADD231SD-128-1)
+{
+ICLASS:      VFNMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMADD231SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD231SS (VFNMADD231SS-128-1)
+{
+ICLASS:      VFNMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMADD231SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB132PD (VFNMSUB132PD-512-1)
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB132PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB132PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB132PS (VFNMSUB132PS-512-1)
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB132PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB132PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB132SD (VFNMSUB132SD-128-1)
+{
+ICLASS:      VFNMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB132SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMSUB132SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB132SS (VFNMSUB132SS-128-1)
+{
+ICLASS:      VFNMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB132SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x9F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMSUB132SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB213PD (VFNMSUB213PD-512-1)
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB213PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB213PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB213PS (VFNMSUB213PS-512-1)
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB213PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB213PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB213SD (VFNMSUB213SD-128-1)
+{
+ICLASS:      VFNMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB213SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMSUB213SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB213SS (VFNMSUB213SS-128-1)
+{
+ICLASS:      VFNMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB213SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xAF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMSUB213SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB231PD (VFNMSUB231PD-512-1)
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VFNMSUB231PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB231PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB231PS (VFNMSUB231PS-512-1)
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VFNMSUB231PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB231PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB231SD (VFNMSUB231SD-128-1)
+{
+ICLASS:      VFNMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB231SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231SD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VFNMSUB231SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB231SS (VFNMSUB231SS-128-1)
+{
+ICLASS:      VFNMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB231SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231SS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0xBF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VFNMSUB231SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VGATHERDPD (VGATHERDPD-512-1)
+{
+ICLASS:      VGATHERDPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERDPD_ZMMf64_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VGATHERDPS (VGATHERDPS-512-1)
+{
+ICLASS:      VGATHERDPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERDPS_ZMMf32_MASKmskw_MEMf32_AVX512_VL512
+}
+
+
+# EMITTING VGATHERQPD (VGATHERQPD-512-1)
+{
+ICLASS:      VGATHERQPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERQPD_ZMMf64_MASKmskw_MEMf64_AVX512_VL512
+}
+
+
+# EMITTING VGATHERQPS (VGATHERQPS-512-1)
+{
+ICLASS:      VGATHERQPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERQPS_YMMf32_MASKmskw_MEMf32_AVX512_VL512
+}
+
+
+# EMITTING VGETEXPPD (VGETEXPPD-512-1)
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VGETEXPPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VGETEXPPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VGETEXPPD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VGETEXPPS (VGETEXPPS-512-1)
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VGETEXPPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VGETEXPPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VGETEXPPS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VGETEXPSD (VGETEXPSD-128-1)
+{
+ICLASS:      VGETEXPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VGETEXPSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VGETEXPSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VGETEXPSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VGETEXPSS (VGETEXPSS-128-1)
+{
+ICLASS:      VGETEXPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VGETEXPSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VGETEXPSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x43 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VGETEXPSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VGETMANTPD (VGETMANTPD-512-1)
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VGETMANTPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VGETMANTPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPD_ZMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTPS (VGETMANTPS-512-1)
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VGETMANTPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VGETMANTPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPS_ZMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTSD (VGETMANTSD-128-1)
+{
+ICLASS:      VGETMANTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VGETMANTSD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VGETMANTSD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1   UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VGETMANTSD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTSS (VGETMANTSS-128-1)
+{
+ICLASS:      VGETMANTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VGETMANTSS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VGETMANTSS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x27 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0   UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VGETMANTSS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF32X4 (VINSERTF32X4-512-1)
+{
+ICLASS:      VINSERTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VINSERTF32X4_ZMMf32_MASKmskw_ZMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x18 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:dq:f32 IMM0:r:b
+IFORM:       VINSERTF32X4_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF64X4 (VINSERTF64X4-512-1)
+{
+ICLASS:      VINSERTF64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1A V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VINSERTF64X4_ZMMf64_MASKmskw_ZMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x1A V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:qq:f64 IMM0:r:b
+IFORM:       VINSERTF64X4_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI32X4 (VINSERTI32X4-512-1)
+{
+ICLASS:      VINSERTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VINSERTI32X4_ZMMu32_MASKmskw_ZMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x38 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:dq:u32 IMM0:r:b
+IFORM:       VINSERTI32X4_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI64X4 (VINSERTI64X4-512-1)
+{
+ICLASS:      VINSERTI64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3A V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VINSERTI64X4_ZMMu64_MASKmskw_ZMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI64X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x3A V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:qq:u64 IMM0:r:b
+IFORM:       VINSERTI64X4_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTPS (VINSERTPS-128-1)
+{
+ICLASS:      VINSERTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x21 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VINSERTPS_XMMf32_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE1
+PATTERN:    EVV 0x21 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 MASK=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VINSERTPS_XMMf32_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VMAXPD (VMAXPD-512-1)
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMAXPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMAXPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMAXPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMAXPS (VMAXPS-512-1)
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMAXPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMAXPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMAXPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMAXSD (VMAXSD-128-1)
+{
+ICLASS:      VMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMAXSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5F VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMAXSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VMAXSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMAXSS (VMAXSS-128-1)
+{
+ICLASS:      VMAXSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMAXSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5F VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMAXSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VMAXSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMINPD (VMINPD-512-1)
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMINPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMINPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMINPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMINPS (VMINPS-512-1)
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMINPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMINPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMINPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMINSD (VMINSD-128-1)
+{
+ICLASS:      VMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5D VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMINSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5D VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMINSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5D VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VMINSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMINSS (VMINSS-128-1)
+{
+ICLASS:      VMINSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5D VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMINSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMINSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5D VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMINSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMINSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5D VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VMINSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-512-1)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f64
+IFORM:       VMOVAPD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-512-2)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf64
+IFORM:       VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-512-3)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf64
+IFORM:       VMOVAPD_MEMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-512-1)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f32
+IFORM:       VMOVAPS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-512-2)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32
+IFORM:       VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-512-3)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32
+IFORM:       VMOVAPS_MEMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VMOVD (VMOVD-128-1)
+{
+ICLASS:      VMOVD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x6E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=GPR32_B():r:d:u32
+IFORM:       VMOVD_XMMu32_GPR32u32_AVX512
+}
+
+{
+ICLASS:      VMOVD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER
+PATTERN:    EVV 0x6E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 MEM0:r:d:u32
+IFORM:       VMOVD_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVD (VMOVD-128-2)
+{
+ICLASS:      VMOVD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x7E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR32_B():w:d:u32 REG1=XMM_R3():r:dq:u32
+IFORM:       VMOVD_GPR32u32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE
+PATTERN:    EVV 0x7E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_GPR_WRITER_STORE()
+OPERANDS:    MEM0:w:d:u32 REG0=XMM_R3():r:dq:u32
+IFORM:       VMOVD_MEMu32_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVDDUP (VMOVDDUP-512-1)
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VMOVDDUP_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MOVDDUP
+PATTERN:    EVV 0x12 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_MOVDDUP()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f64
+IFORM:       VMOVDDUP_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-512-1)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u32
+IFORM:       VMOVDQA32_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-512-2)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-512-3)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VMOVDQA32_MEMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-512-1)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u64
+IFORM:       VMOVDQA64_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-512-2)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-512-3)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VMOVDQA64_MEMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-512-1)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u32
+IFORM:       VMOVDQU32_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-512-2)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-512-3)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VMOVDQU32_MEMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-512-1)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u64
+IFORM:       VMOVDQU64_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-512-2)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-512-3)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VMOVDQU64_MEMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VMOVHLPS (VMOVHLPS-128-1)
+{
+ICLASS:      VMOVHLPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E7NM128
+REAL_OPCODE: Y
+PATTERN:    EVV 0x12 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 REG2=XMM_B3():r:dq:f32
+IFORM:       VMOVHLPS_XMMf32_XMMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVHPD (VMOVHPD-128-1)
+{
+ICLASS:      VMOVHPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0x16 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:q:f64 MEM0:r:q:f64
+IFORM:       VMOVHPD_XMMf64_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVHPD (VMOVHPD-128-2)
+{
+ICLASS:      VMOVHPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0x17 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    MEM0:w:q:f64 REG0=XMM_R3():r:dq:f64
+IFORM:       VMOVHPD_MEMf64_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVHPS (VMOVHPS-128-1)
+{
+ICLASS:      VMOVHPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE2
+PATTERN:    EVV 0x16 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:q:f32 MEM0:r:q:f32
+IFORM:       VMOVHPS_XMMf32_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVHPS (VMOVHPS-128-2)
+{
+ICLASS:      VMOVHPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE2
+PATTERN:    EVV 0x17 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:q:f32 REG0=XMM_R3():r:dq:f32
+IFORM:       VMOVHPS_MEMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVLHPS (VMOVLHPS-128-1)
+{
+ICLASS:      VMOVLHPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E7NM128
+REAL_OPCODE: Y
+PATTERN:    EVV 0x16 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:q:f32 REG2=XMM_B3():r:q:f32
+IFORM:       VMOVLHPS_XMMf32_XMMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVLPD (VMOVLPD-128-1)
+{
+ICLASS:      VMOVLPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0x12 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VMOVLPD_XMMf64_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVLPD (VMOVLPD-128-2)
+{
+ICLASS:      VMOVLPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0x13 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    MEM0:w:q:f64 REG0=XMM_R3():r:q:f64
+IFORM:       VMOVLPD_MEMf64_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVLPS (VMOVLPS-128-1)
+{
+ICLASS:      VMOVLPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE2
+PATTERN:    EVV 0x12 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=XMM_N3():r:dq:f32 MEM0:r:q:f32
+IFORM:       VMOVLPS_XMMf32_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVLPS (VMOVLPS-128-2)
+{
+ICLASS:      VMOVLPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_TUPLE2
+PATTERN:    EVV 0x13 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:q:f32 REG0=XMM_R3():r:q:f32
+IFORM:       VMOVLPS_MEMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVNTDQ (VMOVNTDQ-512-1)
+{
+ICLASS:      VMOVNTDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0xE7 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u32 REG0=ZMM_R3():r:zu32
+IFORM:       VMOVNTDQ_MEMu32_ZMMu32_AVX512
+}
+
+
+# EMITTING VMOVNTDQA (VMOVNTDQA-512-1)
+{
+ICLASS:      VMOVNTDQA
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu32 MEM0:r:zd:u32
+IFORM:       VMOVNTDQA_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVNTPD (VMOVNTPD-512-1)
+{
+ICLASS:      VMOVNTPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f64 REG0=ZMM_R3():r:zf64
+IFORM:       VMOVNTPD_MEMf64_ZMMf64_AVX512
+}
+
+
+# EMITTING VMOVNTPS (VMOVNTPS-512-1)
+{
+ICLASS:      VMOVNTPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f32 REG0=ZMM_R3():r:zf32
+IFORM:       VMOVNTPS_MEMf32_ZMMf32_AVX512
+}
+
+
+# EMITTING VMOVQ (VMOVQ-128-1)
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x6E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=GPR64_B():r:q:u64
+IFORM:       VMOVQ_XMMu64_GPR64u64_AVX512
+}
+
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER
+PATTERN:    EVV 0x6E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 MEM0:r:q:u64
+IFORM:       VMOVQ_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVQ (VMOVQ-128-2)
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x7E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=GPR64_B():w:q:u64 REG1=XMM_R3():r:dq:u64
+IFORM:       VMOVQ_GPR64u64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE
+PATTERN:    EVV 0x7E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_GPR_WRITER_STORE()
+OPERANDS:    MEM0:w:q:u64 REG0=XMM_R3():r:dq:u64
+IFORM:       VMOVQ_MEMu64_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVQ (VMOVQ-128-3)
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x7E VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=XMM_B3():r:dq:u64
+IFORM:       VMOVQ_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0x7E VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 MEM0:r:q:u64
+IFORM:       VMOVQ_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVQ (VMOVQ-128-4)
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xD6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=XMM_R3():r:dq:u64
+IFORM:       VMOVQ_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_SCALAR
+PATTERN:    EVV 0xD6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    MEM0:w:q:u64 REG0=XMM_R3():r:dq:u64
+IFORM:       VMOVQ_MEMu64_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVSD (VMOVSD-128-1)
+{
+ICLASS:      VMOVSD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x10 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f64
+IFORM:       VMOVSD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVSD (VMOVSD-128-2)
+{
+ICLASS:      VMOVSD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x11 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    MEM0:w:q:f64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VMOVSD_MEMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVSD (VMOVSD-128-3)
+{
+ICLASS:      VMOVSD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0x10 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVSD (VMOVSD-128-4)
+{
+ICLASS:      VMOVSD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0x11 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_R3():r:dq:f64
+IFORM:       VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVSHDUP (VMOVSHDUP-512-1)
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VMOVSHDUP_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x16 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f32
+IFORM:       VMOVSHDUP_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSLDUP (VMOVSLDUP-512-1)
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VMOVSLDUP_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f32
+IFORM:       VMOVSLDUP_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSS (VMOVSS-128-1)
+{
+ICLASS:      VMOVSS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x10 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:f32
+IFORM:       VMOVSS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSS (VMOVSS-128-2)
+{
+ICLASS:      VMOVSS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x11 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    MEM0:w:d:f32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VMOVSS_MEMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVSS (VMOVSS-128-3)
+{
+ICLASS:      VMOVSS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0x10 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVSS (VMOVSS-128-4)
+{
+ICLASS:      VMOVSS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX SIMD_SCALAR
+PATTERN:    EVV 0x11 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_R3():r:dq:f32
+IFORM:       VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-512-1)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f64
+IFORM:       VMOVUPD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-512-2)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf64
+IFORM:       VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-512-3)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf64
+IFORM:       VMOVUPD_MEMf64_MASKmskw_ZMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-512-1)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:f32
+IFORM:       VMOVUPS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-512-2)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32
+IFORM:       VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-512-3)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:f32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32
+IFORM:       VMOVUPS_MEMf32_MASKmskw_ZMMf32_AVX512
+}
+
+
+# EMITTING VMULPD (VMULPD-512-1)
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMULPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VMULPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMULPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMULPS (VMULPS-512-1)
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMULPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VMULPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMULPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMULSD (VMULSD-128-1)
+{
+ICLASS:      VMULSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x59 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMULSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMULSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x59 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMULSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMULSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x59 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VMULSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMULSS (VMULSS-128-1)
+{
+ICLASS:      VMULSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x59 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMULSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMULSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x59 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMULSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMULSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x59 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VMULSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPABSD (VPABSD-512-1)
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi32
+IFORM:       VPABSD_ZMMi32_MASKmskw_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPABSD_ZMMi32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPABSQ (VPABSQ-512-1)
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi64
+IFORM:       VPABSQ_ZMMi64_MASKmskw_ZMMi64_AVX512
+}
+
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPABSQ_ZMMi64_MASKmskw_MEMi64_AVX512
+}
+
+
+# EMITTING VPADDD (VPADDD-512-1)
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPADDD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPADDD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPADDQ (VPADDQ-512-1)
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPADDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xD4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPADDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPANDD (VPANDD-512-1)
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPANDD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDND (VPANDND-512-1)
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPANDND_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDND_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDNQ (VPANDNQ-512-1)
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPANDNQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDNQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPANDQ (VPANDQ-512-1)
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPANDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPBLENDMD (VPBLENDMD-512-1)
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPBLENDMD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPBLENDMD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPBLENDMQ (VPBLENDMQ-512-1)
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPBLENDMQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPBLENDMQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-512-1)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x58 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:u32 EMX_BROADCAST_1TO16_32
+IFORM:       VPBROADCASTD_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-512-2)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x58 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_1TO16_32
+IFORM:       VPBROADCASTD_ZMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-512-3)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u32 EMX_BROADCAST_1TO16_32
+IFORM:       VPBROADCASTD_ZMMu32_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-512-1)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u64 EMX_BROADCAST_1TO8_64
+IFORM:       VPBROADCASTQ_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-512-2)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 EMX_BROADCAST_1TO8_64
+IFORM:       VPBROADCASTQ_ZMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-512-3)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  mode64  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR64_B():r:q:u64 EMX_BROADCAST_1TO8_64
+IFORM:       VPBROADCASTQ_ZMMu64_MASKmskw_GPR64u64_AVX512
+}
+
+
+# EMITTING VPCMPD (VPCMPD-512-1)
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32 IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_ZMMi32_ZMMi32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_ZMMi32_MEMi32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPEQD (VPCMPEQD-512-1)
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPCMPEQQ (VPCMPEQQ-512-1)
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x29 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPCMPGTD (VPCMPGTD-512-1)
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x66 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_ZMMi32_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_ZMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPCMPGTQ (VPCMPGTQ-512-1)
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x37 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi64 REG3=ZMM_B3():r:zi64
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_ZMMi64_ZMMi64_AVX512
+}
+
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x37 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_ZMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPCMPQ (VPCMPQ-512-1)
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi64 REG3=ZMM_B3():r:zi64 IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_ZMMi64_ZMMi64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi64 MEM0:r:vv:i64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_ZMMi64_MEMi64_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUD (VPCMPUD-512-1)
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUQ (VPCMPUQ-512-1)
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-512-1)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPCOMPRESSD_MEMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-512-2)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VPCOMPRESSD_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-512-1)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:u64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPCOMPRESSQ_MEMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-512-2)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPCOMPRESSQ_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPERMD (VPERMD-512-1)
+{
+ICLASS:      VPERMD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x36 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPERMD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x36 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMI2D (VPERMI2D-512-1)
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPERMI2D_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMI2D_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMI2PD (VPERMI2PD-512-1)
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VPERMI2PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMI2PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMI2PS (VPERMI2PS-512-1)
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VPERMI2PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMI2PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMI2Q (VPERMI2Q-512-1)
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPERMI2Q_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMI2Q_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-512-1)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x05 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VPERMILPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x05 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPD_ZMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-512-2)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VPERMILPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMILPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-512-1)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VPERMILPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x04 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPS_ZMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-512-2)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VPERMILPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMILPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMPD (VPERMPD-512-1)
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x01 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VPERMPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x01 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMPD_ZMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMPD (VPERMPD-512-2)
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VPERMPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x16 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMPS (VPERMPS-512-1)
+{
+ICLASS:      VPERMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VPERMPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x16 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMQ (VPERMQ-512-1)
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x00 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPERMQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x00 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMQ (VPERMQ-512-2)
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x36 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPERMQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x36 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMT2D (VPERMT2D-512-1)
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPERMT2D_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMT2D_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMT2PD (VPERMT2PD-512-1)
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VPERMT2PD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMT2PD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMT2PS (VPERMT2PS-512-1)
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VPERMT2PS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMT2PS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMT2Q (VPERMT2Q-512-1)
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPERMT2Q_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMT2Q_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-512-1)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u32
+IFORM:       VPEXPANDD_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-512-2)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VPEXPANDD_ZMMu32_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-512-1)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u64
+IFORM:       VPEXPANDQ_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-512-2)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VPEXPANDQ_ZMMu64_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPGATHERDD (VPGATHERDD-512-1)
+{
+ICLASS:      VPGATHERDD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERDD_ZMMu32_MASKmskw_MEMu32_AVX512_VL512
+}
+
+
+# EMITTING VPGATHERDQ (VPGATHERDQ-512-1)
+{
+ICLASS:      VPGATHERDQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERDQ_ZMMu64_MASKmskw_MEMu64_AVX512_VL512
+}
+
+
+# EMITTING VPGATHERQD (VPGATHERQD-512-1)
+{
+ICLASS:      VPGATHERQD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERQD_YMMu32_MASKmskw_MEMu32_AVX512_VL512
+}
+
+
+# EMITTING VPGATHERQQ (VPGATHERQQ-512-1)
+{
+ICLASS:      VPGATHERQQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERQQ_ZMMu64_MASKmskw_MEMu64_AVX512_VL512
+}
+
+
+# EMITTING VPMAXSD (VPMAXSD-512-1)
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32
+IFORM:       VPMAXSD_ZMMi32_MASKmskw_ZMMi32_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMAXSD_ZMMi32_MASKmskw_ZMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMAXSQ (VPMAXSQ-512-1)
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi64 REG3=ZMM_B3():r:zi64
+IFORM:       VPMAXSQ_ZMMi64_MASKmskw_ZMMi64_ZMMi64_AVX512
+}
+
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMAXSQ_ZMMi64_MASKmskw_ZMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMAXUD (VPMAXUD-512-1)
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPMAXUD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMAXUD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMAXUQ (VPMAXUQ-512-1)
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPMAXUQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMAXUQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMINSD (VPMINSD-512-1)
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32
+IFORM:       VPMINSD_ZMMi32_MASKmskw_ZMMi32_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMINSD_ZMMi32_MASKmskw_ZMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMINSQ (VPMINSQ-512-1)
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi64 REG3=ZMM_B3():r:zi64
+IFORM:       VPMINSQ_ZMMi64_MASKmskw_ZMMi64_ZMMi64_AVX512
+}
+
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMINSQ_ZMMi64_MASKmskw_ZMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMINUD (VPMINUD-512-1)
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPMINUD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMINUD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMINUQ (VPMINUQ-512-1)
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPMINUQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMINUQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-512-1)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VPMOVDB_XMMu8_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-512-2)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPMOVDB_MEMu8_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-512-1)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VPMOVDW_YMMu16_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-512-2)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPMOVDW_MEMu16_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-512-1)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVQB_XMMu8_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-512-2)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVQB_MEMu8_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-512-1)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVQD_YMMu32_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-512-2)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVQD_MEMu32_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-512-1)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVQW_XMMu16_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-512-2)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVQW_MEMu16_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-512-1)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi32
+IFORM:       VPMOVSDB_XMMi8_MASKmskw_ZMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-512-2)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:i8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi32
+IFORM:       VPMOVSDB_MEMi8_MASKmskw_ZMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-512-1)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi32
+IFORM:       VPMOVSDW_YMMi16_MASKmskw_ZMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-512-2)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:i16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi32
+IFORM:       VPMOVSDW_MEMi16_MASKmskw_ZMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-512-1)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi64
+IFORM:       VPMOVSQB_XMMi8_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-512-2)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:q:i8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi64
+IFORM:       VPMOVSQB_MEMi8_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-512-1)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi64
+IFORM:       VPMOVSQD_YMMi32_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-512-2)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:i32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi64
+IFORM:       VPMOVSQD_MEMi32_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-512-1)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi64
+IFORM:       VPMOVSQW_XMMi16_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-512-2)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:i16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi64
+IFORM:       VPMOVSQW_MEMi16_MASKmskw_ZMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSXBD (VPMOVSXBD-512-1)
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBD_ZMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i8
+IFORM:       VPMOVSXBD_ZMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBQ (VPMOVSXBQ-512-1)
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBQ_ZMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVSXBQ_ZMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXDQ (VPMOVSXDQ-512-1)
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i32
+IFORM:       VPMOVSXDQ_ZMMi64_MASKmskw_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i32
+IFORM:       VPMOVSXDQ_ZMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVSXWD (VPMOVSXWD-512-1)
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i16
+IFORM:       VPMOVSXWD_ZMMi32_MASKmskw_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i16
+IFORM:       VPMOVSXWD_ZMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVSXWQ (VPMOVSXWQ-512-1)
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVSXWQ_ZMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i16
+IFORM:       VPMOVSXWQ_ZMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-512-1)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VPMOVUSDB_XMMu8_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-512-2)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPMOVUSDB_MEMu8_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-512-1)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32
+IFORM:       VPMOVUSDW_YMMu16_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-512-2)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPMOVUSDW_MEMu16_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-512-1)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQB_XMMu8_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-512-2)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQB_MEMu8_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-512-1)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQD_YMMu32_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-512-2)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQD_MEMu32_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-512-1)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQW_XMMu16_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-512-2)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPMOVUSQW_MEMu16_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVZXBD (VPMOVZXBD-512-1)
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBD_ZMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i8
+IFORM:       VPMOVZXBD_ZMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBQ (VPMOVZXBQ-512-1)
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBQ_ZMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVZXBQ_ZMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXDQ (VPMOVZXDQ-512-1)
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i32
+IFORM:       VPMOVZXDQ_ZMMi64_MASKmskw_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i32
+IFORM:       VPMOVZXDQ_ZMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVZXWD (VPMOVZXWD-512-1)
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i16
+IFORM:       VPMOVZXWD_ZMMi32_MASKmskw_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i16
+IFORM:       VPMOVZXWD_ZMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVZXWQ (VPMOVZXWQ-512-1)
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVZXWQ_ZMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i16
+IFORM:       VPMOVZXWQ_ZMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMULDQ (VPMULDQ-512-1)
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32
+IFORM:       VPMULDQ_ZMMi64_MASKmskw_ZMMi32_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMULDQ_ZMMi64_MASKmskw_ZMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMULLD (VPMULLD-512-1)
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPMULLD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULLD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMULUDQ (VPMULUDQ-512-1)
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPMULUDQ_ZMMu64_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULUDQ_ZMMu64_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPORD (VPORD-512-1)
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPORD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPORD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPORQ (VPORQ-512-1)
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPORQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPORQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPROLD (VPROLD-512-1)
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPROLD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPROLQ (VPROLQ-512-1)
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPROLQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPROLVD (VPROLVD-512-1)
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPROLVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPROLVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPROLVQ (VPROLVQ-512-1)
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPROLVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPROLVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPRORD (VPRORD-512-1)
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPRORD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPRORQ (VPRORQ-512-1)
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPRORQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPRORVD (VPRORVD-512-1)
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPRORVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPRORVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPRORVQ (VPRORVQ-512-1)
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPRORVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPRORVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSCATTERDD (VPSCATTERDD-512-1)
+{
+ICLASS:      VPSCATTERDD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zu32
+IFORM:       VPSCATTERDD_MEMu32_MASKmskw_ZMMu32_AVX512_VL512
+}
+
+
+# EMITTING VPSCATTERDQ (VPSCATTERDQ-512-1)
+{
+ICLASS:      VPSCATTERDQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPSCATTERDQ_MEMu64_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+
+# EMITTING VPSCATTERQD (VPSCATTERQD-512-1)
+{
+ICLASS:      VPSCATTERQD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPSCATTERQD_MEMu32_MASKmskw_YMMu32_AVX512_VL512
+}
+
+
+# EMITTING VPSCATTERQQ (VPSCATTERQQ-512-1)
+{
+ICLASS:      VPSCATTERQQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zu64
+IFORM:       VPSCATTERQQ_MEMu64_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+
+# EMITTING VPSHUFD (VPSHUFD-512-1)
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSHUFD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x70 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHUFD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-512-1)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSLLD_ZMMu32_MASKmskw_ZMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:dq:u32
+IFORM:       VPSLLD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-512-2)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSLLD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-512-1)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSLLQ_ZMMu64_MASKmskw_ZMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:dq:u64
+IFORM:       VPSLLQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-512-2)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPSLLQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLVD (VPSLLVD-512-1)
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSLLVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSLLVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLVQ (VPSLLVQ-512-1)
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSLLVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSLLVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-512-1)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRAD_ZMMu32_MASKmskw_ZMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:dq:u32
+IFORM:       VPSRAD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-512-2)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSRAD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-512-1)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRAQ_ZMMu64_MASKmskw_ZMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:dq:u64
+IFORM:       VPSRAQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-512-2)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPSRAQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAVD (VPSRAVD-512-1)
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSRAVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRAVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAVQ (VPSRAVQ-512-1)
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSRAVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRAVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-512-1)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRLD_ZMMu32_MASKmskw_ZMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:dq:u32
+IFORM:       VPSRLD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-512-2)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSRLD_ZMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLD_ZMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-512-1)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRLQ_ZMMu64_MASKmskw_ZMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:dq:u64
+IFORM:       VPSRLQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-512-2)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPSRLQ_ZMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_N3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLQ_ZMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLVD (VPSRLVD-512-1)
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSRLVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRLVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLVQ (VPSRLVQ-512-1)
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSRLVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRLVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSUBD (VPSUBD-512-1)
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSUBD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSUBD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSUBQ (VPSUBQ-512-1)
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSUBQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSUBQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTERNLOGD (VPTERNLOGD-512-1)
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPTERNLOGD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGD_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPTERNLOGQ (VPTERNLOGQ-512-1)
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPTERNLOGQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPTESTMD (VPTESTMD-512-1)
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTMQ (VPTESTMQ-512-1)
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTESTNMD (VPTESTNMD-512-1)
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTNMQ (VPTESTNMQ-512-1)
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKHDQ (VPUNPCKHDQ-512-1)
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPUNPCKHDQ_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKHDQ_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKHQDQ (VPUNPCKHQDQ-512-1)
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPUNPCKHQDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKHQDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKLDQ (VPUNPCKLDQ-512-1)
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPUNPCKLDQ_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x62 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKLDQ_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKLQDQ (VPUNPCKLQDQ-512-1)
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPUNPCKLQDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKLQDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPXORD (VPXORD-512-1)
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPXORD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPXORD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPXORQ (VPXORQ-512-1)
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPXORQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPXORQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VRCP14PD (VRCP14PD-512-1)
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRCP14PD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRCP14PD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRCP14PS (VRCP14PS-512-1)
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRCP14PS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRCP14PS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VRCP14SD (VRCP14SD-128-1)
+{
+ICLASS:      VRCP14SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x4D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRCP14SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VRCP14SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x4D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VRCP14SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VRCP14SS (VRCP14SS-128-1)
+{
+ICLASS:      VRCP14SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x4D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRCP14SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VRCP14SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x4D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VRCP14SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VRNDSCALEPD (VRNDSCALEPD-512-1)
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x09 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VRNDSCALEPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x09 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VRNDSCALEPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x09 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPD_ZMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALEPS (VRNDSCALEPS-512-1)
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x08 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VRNDSCALEPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x08 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VRNDSCALEPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x08 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPS_ZMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALESD (VRNDSCALESD-128-1)
+{
+ICLASS:      VRNDSCALESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x0B V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRNDSCALESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x0B V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRNDSCALESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x0B V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1   UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VRNDSCALESD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALESS (VRNDSCALESS-128-1)
+{
+ICLASS:      VRNDSCALESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x0A V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRNDSCALESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x0A V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRNDSCALESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x0A V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0   UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VRNDSCALESS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRSQRT14PD (VRSQRT14PD-512-1)
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VRSQRT14PD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRSQRT14PD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRSQRT14PS (VRSQRT14PS-512-1)
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VRSQRT14PS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRSQRT14PS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VRSQRT14SD (VRSQRT14SD-128-1)
+{
+ICLASS:      VRSQRT14SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x4F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VRSQRT14SD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VRSQRT14SD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x4F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VRSQRT14SD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VRSQRT14SS (VRSQRT14SS-128-1)
+{
+ICLASS:      VRSQRT14SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x4F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VRSQRT14SS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VRSQRT14SS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E10
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x4F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VRSQRT14SS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSCALEFPD (VSCALEFPD-512-1)
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VSCALEFPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VSCALEFPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSCALEFPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSCALEFPS (VSCALEFPS-512-1)
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VSCALEFPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VSCALEFPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSCALEFPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSCALEFSD (VSCALEFSD-128-1)
+{
+ICLASS:      VSCALEFSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSCALEFSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSCALEFSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VSCALEFSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSCALEFSS (VSCALEFSS-128-1)
+{
+ICLASS:      VSCALEFSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSCALEFSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSCALEFSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VSCALEFSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSCATTERDPD (VSCATTERDPD-512-1)
+{
+ICLASS:      VSCATTERDPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zf64
+IFORM:       VSCATTERDPD_MEMf64_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+
+# EMITTING VSCATTERDPS (VSCATTERDPS-512-1)
+{
+ICLASS:      VSCATTERDPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zf32
+IFORM:       VSCATTERDPS_MEMf32_MASKmskw_ZMMf32_AVX512_VL512
+}
+
+
+# EMITTING VSCATTERQPD (VSCATTERQPD-512-1)
+{
+ICLASS:      VSCATTERQPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W1 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=ZMM_R3():r:zf64
+IFORM:       VSCATTERQPD_MEMf64_MASKmskw_ZMMf64_AVX512_VL512
+}
+
+
+# EMITTING VSCATTERQPS (VSCATTERQPS-512-1)
+{
+ICLASS:      VSCATTERQPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL512  W0 UISA_VMODRM_ZMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:f32
+IFORM:       VSCATTERQPS_MEMf32_MASKmskw_YMMf32_AVX512_VL512
+}
+
+
+# EMITTING VSHUFF32X4 (VSHUFF32X4-512-1)
+{
+ICLASS:      VSHUFF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VSHUFF32X4_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x23 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFF32X4_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFF64X2 (VSHUFF64X2-512-1)
+{
+ICLASS:      VSHUFF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VSHUFF64X2_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x23 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFF64X2_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFI32X4 (VSHUFI32X4-512-1)
+{
+ICLASS:      VSHUFI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x43 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VSHUFI32X4_ZMMu32_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x43 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFI32X4_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFI64X2 (VSHUFI64X2-512-1)
+{
+ICLASS:      VSHUFI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x43 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VSHUFI64X2_ZMMu64_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x43 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFI64X2_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPD (VSHUFPD-512-1)
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VSHUFPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPS (VSHUFPS-512-1)
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VSHUFPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VSQRTPD (VSQRTPD-512-1)
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VSQRTPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VSQRTPD_ZMMf64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSQRTPD_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VSQRTPS (VSQRTPS-512-1)
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VSQRTPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32
+IFORM:       VSQRTPS_ZMMf32_MASKmskw_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSQRTPS_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VSQRTSD (VSQRTSD-128-1)
+{
+ICLASS:      VSQRTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSQRTSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSQRTSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x51 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VSQRTSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSQRTSS (VSQRTSS-128-1)
+{
+ICLASS:      VSQRTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSQRTSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSQRTSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x51 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VSQRTSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSUBPD (VSUBPD-512-1)
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VSUBPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VSUBPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSUBPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSUBPS (VSUBPS-512-1)
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VSUBPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VSUBPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSUBPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSUBSD (VSUBSD-128-1)
+{
+ICLASS:      VSUBSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5C VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSUBSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5C VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSUBSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5C VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1    ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VSUBSD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSUBSS (VSUBSS-128-1)
+{
+ICLASS:      VSUBSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5C VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSUBSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x5C VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() AVX512_ROUND()  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSUBSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x5C VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0    ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VSUBSS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUCOMISD (VUCOMISD-128-1)
+{
+ICLASS:      VUCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f64 REG1=XMM_B3():r:dq:f64
+IFORM:       VUCOMISD_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VUCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2E V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f64:TXT=SAESTR REG1=XMM_B3():r:dq:f64
+IFORM:       VUCOMISD_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VUCOMISD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():r:dq:f64 MEM0:r:q:f64
+IFORM:       VUCOMISD_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUCOMISS (VUCOMISS-128-1)
+{
+ICLASS:      VUCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2E VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f32 REG1=XMM_B3():r:dq:f32
+IFORM:       VUCOMISS_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VUCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x2E VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():r:dq:f32:TXT=SAESTR REG1=XMM_B3():r:dq:f32
+IFORM:       VUCOMISS_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VUCOMISS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_SCALAR
+EXCEPTIONS:     AVX512-E3NF
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-mod of-0 af-0 sf-0 ]
+ATTRIBUTES:  MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x2E VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():r:dq:f32 MEM0:r:d:f32
+IFORM:       VUCOMISS_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKHPD (VUNPCKHPD-512-1)
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VUNPCKHPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKHPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKHPS (VUNPCKHPS-512-1)
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VUNPCKHPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKHPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKLPD (VUNPCKLPD-512-1)
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VUNPCKLPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKLPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKLPS (VUNPCKLPS-512-1)
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VUNPCKLPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKLPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+AVX_INSTRUCTIONS()::
+# EMITTING KANDNW (KANDNW-256-1)
+{
+ICLASS:      KANDNW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x42 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDNW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDW (KANDW-256-1)
+{
+ICLASS:      KANDW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x41 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVW (KMOVW-128-1)
+{
+ICLASS:      KMOVW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw:u16
+IFORM:       KMOVW_MASKmskw_MASKu16_AVX512
+}
+
+{
+ICLASS:      KMOVW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw MEM0:r:wrd:u16
+IFORM:       KMOVW_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING KMOVW (KMOVW-128-2)
+{
+ICLASS:      KMOVW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x91 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W0  NOVSR
+OPERANDS:    MEM0:w:wrd:u16 REG0=MASK_R():r:mskw
+IFORM:       KMOVW_MEMu16_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVW (KMOVW-128-3)
+{
+ICLASS:      KMOVW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x92 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=GPR32_B():r:d:u32
+IFORM:       KMOVW_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING KMOVW (KMOVW-128-4)
+{
+ICLASS:      KMOVW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x93 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=MASK_B():r:mskw
+IFORM:       KMOVW_GPR32u32_MASKmskw_AVX512
+}
+
+
+# EMITTING KNOTW (KNOTW-128-1)
+{
+ICLASS:      KNOTW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x44 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw
+IFORM:       KNOTW_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORTESTW (KORTESTW-128-1)
+{
+ICLASS:      KORTESTW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x98 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KORTESTW_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORW (KORW-256-1)
+{
+ICLASS:      KORW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x45 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KORW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KSHIFTLW (KSHIFTLW-128-1)
+{
+ICLASS:      KSHIFTLW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x32 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTLW_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTRW (KSHIFTRW-128-1)
+{
+ICLASS:      KSHIFTRW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x30 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTRW_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KUNPCKBW (KUNPCKBW-256-1)
+{
+ICLASS:      KUNPCKBW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4B V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KUNPCKBW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXNORW (KXNORW-256-1)
+{
+ICLASS:      KXNORW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x46 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXNORW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXORW (KXORW-256-1)
+{
+ICLASS:      KXORW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512F_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x47 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXORW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/avx512cd/vconflict-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPBROADCASTMB2Q (VPBROADCASTMB2Q-512-1)
+{
+ICLASS:      VPBROADCASTMB2Q
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x2A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK_B():r:mskw:u64 EMX_BROADCAST_1TO8_8
+IFORM:       VPBROADCASTMB2Q_ZMMu64_MASKu64_AVX512CD
+}
+
+
+# EMITTING VPBROADCASTMW2D (VPBROADCASTMW2D-512-1)
+{
+ICLASS:      VPBROADCASTMW2D
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x3A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK_B():r:mskw:u32 EMX_BROADCAST_1TO16_16
+IFORM:       VPBROADCASTMW2D_ZMMu32_MASKu32_AVX512CD
+}
+
+
+# EMITTING VPCONFLICTD (VPCONFLICTD-512-1)
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VPCONFLICTD_ZMMu32_MASKmskw_ZMMu32_AVX512CD
+}
+
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCONFLICTD_ZMMu32_MASKmskw_MEMu32_AVX512CD
+}
+
+
+# EMITTING VPCONFLICTQ (VPCONFLICTQ-512-1)
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VPCONFLICTQ_ZMMu64_MASKmskw_ZMMu64_AVX512CD
+}
+
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCONFLICTQ_ZMMu64_MASKmskw_MEMu64_AVX512CD
+}
+
+
+# EMITTING VPLZCNTD (VPLZCNTD-512-1)
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu32
+IFORM:       VPLZCNTD_ZMMu32_MASKmskw_ZMMu32_AVX512CD
+}
+
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPLZCNTD_ZMMu32_MASKmskw_MEMu32_AVX512CD
+}
+
+
+# EMITTING VPLZCNTQ (VPLZCNTQ-512-1)
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VPLZCNTQ_ZMMu64_MASKmskw_ZMMu64_AVX512CD
+}
+
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPLZCNTQ_ZMMu64_MASKmskw_MEMu64_AVX512CD
+}
+
+
+
+
+###FILE: ./datafiles/avx512-skx/skx-isa.xed.txt
+
+#BEGIN_LEGAL
+#INTEL CONFIDENTIAL
+#
+#Copyright (c) 2017, Intel Corporation. All rights reserved.
+#
+#The source code contained or described herein and all documents
+#related to the source code ("Material") are owned by Intel Corporation
+#or its suppliers or licensors. Title to the Material remains with
+#Intel Corporation or its suppliers and licensors. The Material
+#contains trade secrets and proprietary and confidential information of
+#Intel or its suppliers and licensors. The Material is protected by
+#worldwide copyright and trade secret laws and treaty provisions. No
+#part of the Material may be used, copied, reproduced, modified,
+#published, uploaded, posted, transmitted, distributed, or disclosed in
+#any way without Intel's prior express written permission.
+#
+#No license under any patent, copyright, trade secret or other
+#intellectual property right is granted to or conferred upon you by
+#disclosure or delivery of the Materials, either expressly, by
+#implication, inducement, estoppel or otherwise. Any license under such
+#intellectual property rights must be express and approved by Intel in
+#writing.
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VADDPD (VADDPD-128-1)
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VADDPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VADDPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VADDPD (VADDPD-256-1)
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VADDPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VADDPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VADDPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VADDPS (VADDPS-128-1)
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VADDPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VADDPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VADDPS (VADDPS-256-1)
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x58 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VADDPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VADDPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x58 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VADDPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VALIGND (VALIGND-128-1)
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VALIGND_XMMu32_MASKmskw_XMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGND_XMMu32_MASKmskw_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VALIGND (VALIGND-256-1)
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VALIGND_YMMu32_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGND
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGND_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VALIGNQ (VALIGNQ-128-1)
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VALIGNQ_XMMu64_MASKmskw_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGNQ_XMMu64_MASKmskw_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VALIGNQ (VALIGNQ-256-1)
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x03 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VALIGNQ_YMMu64_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VALIGNQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x03 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VALIGNQ_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VANDNPD (VANDNPD-128-1)
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VANDNPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDNPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDNPD (VANDNPD-256-1)
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VANDNPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDNPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDNPD (VANDNPD-512-1)
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VANDNPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VANDNPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDNPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDNPS (VANDNPS-128-1)
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VANDNPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDNPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VANDNPS (VANDNPS-256-1)
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VANDNPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDNPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VANDNPS (VANDNPS-512-1)
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VANDNPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VANDNPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDNPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VANDPD (VANDPD-128-1)
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VANDPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDPD (VANDPD-256-1)
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VANDPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDPD (VANDPD-512-1)
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VANDPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VANDPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VANDPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VANDPS (VANDPS-128-1)
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VANDPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VANDPS (VANDPS-256-1)
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VANDPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VANDPS (VANDPS-512-1)
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x54 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VANDPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VANDPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x54 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VANDPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VBLENDMPD (VBLENDMPD-128-1)
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VBLENDMPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VBLENDMPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VBLENDMPD (VBLENDMPD-256-1)
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VBLENDMPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VBLENDMPD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VBLENDMPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VBLENDMPS (VBLENDMPS-128-1)
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VBLENDMPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VBLENDMPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VBLENDMPS (VBLENDMPS-256-1)
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VBLENDMPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VBLENDMPS
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x65 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VBLENDMPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF32X2 (VBROADCASTF32X2-256-1)
+{
+ICLASS:      VBROADCASTF32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 EMX_BROADCAST_2TO8_32
+IFORM:       VBROADCASTF32X2_YMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VBROADCASTF32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x19 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f32 EMX_BROADCAST_2TO8_32
+IFORM:       VBROADCASTF32X2_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF32X2 (VBROADCASTF32X2-512-1)
+{
+ICLASS:      VBROADCASTF32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 EMX_BROADCAST_2TO16_32
+IFORM:       VBROADCASTF32X2_ZMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VBROADCASTF32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x19 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f32 EMX_BROADCAST_2TO16_32
+IFORM:       VBROADCASTF32X2_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF32X4 (VBROADCASTF32X4-256-1)
+{
+ICLASS:      VBROADCASTF32X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x1A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32 EMX_BROADCAST_4TO8_32
+IFORM:       VBROADCASTF32X4_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF32X8 (VBROADCASTF32X8-512-1)
+{
+ICLASS:      VBROADCASTF32X8
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x1B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32 EMX_BROADCAST_8TO16_32
+IFORM:       VBROADCASTF32X8_ZMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTF64X2 (VBROADCASTF64X2-256-1)
+{
+ICLASS:      VBROADCASTF64X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x1A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f64 EMX_BROADCAST_2TO4_64
+IFORM:       VBROADCASTF64X2_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTF64X2 (VBROADCASTF64X2-512-1)
+{
+ICLASS:      VBROADCASTF64X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x1A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f64 EMX_BROADCAST_2TO8_64
+IFORM:       VBROADCASTF64X2_ZMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X2 (VBROADCASTI32X2-128-1)
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_2TO4_32
+IFORM:       VBROADCASTI32X2_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u32 EMX_BROADCAST_2TO4_32
+IFORM:       VBROADCASTI32X2_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X2 (VBROADCASTI32X2-256-1)
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_2TO8_32
+IFORM:       VBROADCASTI32X2_YMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u32 EMX_BROADCAST_2TO8_32
+IFORM:       VBROADCASTI32X2_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X2 (VBROADCASTI32X2-512-1)
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_2TO16_32
+IFORM:       VBROADCASTI32X2_ZMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VBROADCASTI32X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u32 EMX_BROADCAST_2TO16_32
+IFORM:       VBROADCASTI32X2_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X4 (VBROADCASTI32X4-256-1)
+{
+ICLASS:      VBROADCASTI32X4
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x5A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u32 EMX_BROADCAST_4TO8_32
+IFORM:       VBROADCASTI32X4_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI32X8 (VBROADCASTI32X8-512-1)
+{
+ICLASS:      VBROADCASTI32X8
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x5B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u32 EMX_BROADCAST_8TO16_32
+IFORM:       VBROADCASTI32X8_ZMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VBROADCASTI64X2 (VBROADCASTI64X2-256-1)
+{
+ICLASS:      VBROADCASTI64X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x5A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u64 EMX_BROADCAST_2TO4_64
+IFORM:       VBROADCASTI64X2_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VBROADCASTI64X2 (VBROADCASTI64X2-512-1)
+{
+ICLASS:      VBROADCASTI64X2
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x5A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u64 EMX_BROADCAST_2TO8_64
+IFORM:       VBROADCASTI64X2_ZMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VBROADCASTSD (VBROADCASTSD-256-1)
+{
+ICLASS:      VBROADCASTSD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x19 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f64 EMX_BROADCAST_1TO4_64
+IFORM:       VBROADCASTSD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTSD (VBROADCASTSD-256-2)
+{
+ICLASS:      VBROADCASTSD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 EMX_BROADCAST_1TO4_64
+IFORM:       VBROADCASTSD_YMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-128-1)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x18 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:f32 EMX_BROADCAST_1TO4_32
+IFORM:       VBROADCASTSS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-128-2)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 EMX_BROADCAST_1TO4_32
+IFORM:       VBROADCASTSS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-256-1)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x18 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:f32 EMX_BROADCAST_1TO8_32
+IFORM:       VBROADCASTSS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VBROADCASTSS (VBROADCASTSS-256-2)
+{
+ICLASS:      VBROADCASTSS
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 EMX_BROADCAST_1TO8_32
+IFORM:       VBROADCASTSS_YMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VCMPPD (VCMPPD-128-1)
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VCMPPD (VCMPPD-256-1)
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_YMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPD_MASKmskw_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VCMPPS (VCMPPS-128-1)
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCMPPS (VCMPPS-256-1)
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xC2 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_YMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VCMPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xC2 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VCMPPS_MASKmskw_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-128-1)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:f64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VCOMPRESSPD_MEMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-128-2)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f64
+IFORM:       VCOMPRESSPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-256-1)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:f64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f64
+IFORM:       VCOMPRESSPD_MEMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPD (VCOMPRESSPD-256-2)
+{
+ICLASS:      VCOMPRESSPD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f64
+IFORM:       VCOMPRESSPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-128-1)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:f32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VCOMPRESSPS_MEMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-128-2)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f32
+IFORM:       VCOMPRESSPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-256-1)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:f32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f32
+IFORM:       VCOMPRESSPS_MEMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VCOMPRESSPS (VCOMPRESSPS-256-2)
+{
+ICLASS:      VCOMPRESSPS
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f32
+IFORM:       VCOMPRESSPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PD (VCVTDQ2PD-128-1)
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VCVTDQ2PD_XMMf64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PD_XMMf64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PD (VCVTDQ2PD-256-1)
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VCVTDQ2PD_YMMf64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PD_YMMf64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PS (VCVTDQ2PS-128-1)
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VCVTDQ2PS_XMMf32_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PS_XMMf32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTDQ2PS (VCVTDQ2PS-256-1)
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i32
+IFORM:       VCVTDQ2PS_YMMf32_MASKmskw_YMMi32_AVX512
+}
+
+{
+ICLASS:      VCVTDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VCVTDQ2PS_YMMf32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VCVTPD2DQ (VCVTPD2DQ-128-1)
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTPD2DQ_XMMi32_MASKmskw_XMMf64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2DQ_XMMi32_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VCVTPD2DQ (VCVTPD2DQ-256-1)
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTPD2DQ_XMMi32_MASKmskw_YMMf64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2DQ_XMMi32_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VCVTPD2PS (VCVTPD2PS-128-1)
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTPD2PS_XMMf32_MASKmskw_XMMf64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2PS_XMMf32_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VCVTPD2PS (VCVTPD2PS-256-1)
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTPD2PS_XMMf32_MASKmskw_YMMf64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTPD2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2PS_XMMf32_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VCVTPD2QQ (VCVTPD2QQ-128-1)
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTPD2QQ_XMMi64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2QQ_XMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPD2QQ (VCVTPD2QQ-256-1)
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTPD2QQ_YMMi64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2QQ_YMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPD2QQ (VCVTPD2QQ-512-1)
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2QQ_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2QQ_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2QQ_ZMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPD2UDQ (VCVTPD2UDQ-128-1)
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTPD2UDQ_XMMu32_MASKmskw_XMMf64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UDQ_XMMu32_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VCVTPD2UDQ (VCVTPD2UDQ-256-1)
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTPD2UDQ_XMMu32_MASKmskw_YMMf64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UDQ_XMMu32_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VCVTPD2UQQ (VCVTPD2UQQ-128-1)
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTPD2UQQ_XMMu64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UQQ_XMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPD2UQQ (VCVTPD2UQQ-256-1)
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTPD2UQQ_YMMu64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UQQ_YMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPD2UQQ (VCVTPD2UQQ-512-1)
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2UQQ_ZMMu64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTPD2UQQ_ZMMu64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTPD2UQQ_ZMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTPH2PS (VCVTPH2PS-128-2)
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x13 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f16
+IFORM:       VCVTPH2PS_XMMf32_MASKmskw_XMMf16_AVX512
+}
+
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x13 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f16
+IFORM:       VCVTPH2PS_XMMf32_MASKmskw_MEMf16_AVX512
+}
+
+
+# EMITTING VCVTPH2PS (VCVTPH2PS-256-2)
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x13 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f16
+IFORM:       VCVTPH2PS_YMMf32_MASKmskw_XMMf16_AVX512
+}
+
+{
+ICLASS:      VCVTPH2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x13 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f16
+IFORM:       VCVTPH2PS_YMMf32_MASKmskw_MEMf16_AVX512
+}
+
+
+# EMITTING VCVTPS2DQ (VCVTPS2DQ-128-1)
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2DQ_XMMi32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2DQ_XMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2DQ (VCVTPS2DQ-256-1)
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2DQ_YMMi32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2DQ_YMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2PD (VCVTPS2PD-128-1)
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2PD_XMMf64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2PD_XMMf64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2PD (VCVTPS2PD-256-1)
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5A VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2PD_YMMf64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5A VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2PD_YMMf64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-128-2)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E11NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x1D V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f32 IMM0:r:b
+IFORM:       VCVTPS2PH_XMMf16_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-128-3)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x1D V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:f16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f32 IMM0:r:b
+IFORM:       VCVTPS2PH_MEMf16_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-256-2)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E11NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x1D V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f32 IMM0:r:b
+IFORM:       VCVTPS2PH_XMMf16_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2PH (VCVTPS2PH-256-3)
+{
+ICLASS:      VCVTPS2PH
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E11
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR DISP8_HALFMEM
+PATTERN:    EVV 0x1D V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:f16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f32 IMM0:r:b
+IFORM:       VCVTPS2PH_MEMf16_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VCVTPS2QQ (VCVTPS2QQ-128-1)
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2QQ_XMMi64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2QQ_XMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2QQ (VCVTPS2QQ-256-1)
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2QQ_YMMi64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2QQ_YMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2QQ (VCVTPS2QQ-512-1)
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2QQ_ZMMi64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7B V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2QQ_ZMMi64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2QQ_ZMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2UDQ (VCVTPS2UDQ-128-1)
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2UDQ_XMMu32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UDQ_XMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2UDQ (VCVTPS2UDQ-256-1)
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2UDQ_YMMu32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UDQ_YMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2UQQ (VCVTPS2UQQ-128-1)
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2UQQ_XMMu64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UQQ_XMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2UQQ (VCVTPS2UQQ-256-1)
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTPS2UQQ_YMMu64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UQQ_YMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTPS2UQQ (VCVTPS2UQQ-512-1)
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2UQQ_ZMMu64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x79 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTPS2UQQ_ZMMu64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x79 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTPS2UQQ_ZMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTQQ2PD (VCVTQQ2PD-128-1)
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTQQ2PD_XMMi64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PD_XMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTQQ2PD (VCVTQQ2PD-256-1)
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTQQ2PD_YMMi64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PD_YMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTQQ2PD (VCVTQQ2PD-512-1)
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTQQ2PD_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTQQ2PD_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PD_ZMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTQQ2PS (VCVTQQ2PS-128-1)
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VCVTQQ2PS_XMMf32_MASKmskw_XMMu64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PS_XMMf32_MASKmskw_MEMu64_AVX512_VL128
+}
+
+
+# EMITTING VCVTQQ2PS (VCVTQQ2PS-256-1)
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VCVTQQ2PS_XMMf32_MASKmskw_YMMu64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PS_XMMf32_MASKmskw_MEMu64_AVX512_VL256
+}
+
+
+# EMITTING VCVTQQ2PS (VCVTQQ2PS-512-1)
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTQQ2PS_YMMf32_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VNP V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTQQ2PS_YMMf32_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTQQ2PS_YMMf32_MASKmskw_MEMu64_AVX512_VL512
+}
+
+
+# EMITTING VCVTTPD2DQ (VCVTTPD2DQ-128-1)
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTTPD2DQ_XMMi32_MASKmskw_XMMf64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2DQ_XMMi32_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VCVTTPD2DQ (VCVTTPD2DQ-256-1)
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xE6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTTPD2DQ_XMMi32_MASKmskw_YMMf64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTTPD2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xE6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2DQ_XMMi32_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VCVTTPD2QQ (VCVTTPD2QQ-128-1)
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTTPD2QQ_XMMi64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2QQ_XMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPD2QQ (VCVTTPD2QQ-256-1)
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTTPD2QQ_YMMi64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2QQ_YMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPD2QQ (VCVTTPD2QQ-512-1)
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2QQ_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2QQ_ZMMi64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2QQ_ZMMi64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPD2UDQ (VCVTTPD2UDQ-128-1)
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTTPD2UDQ_XMMu32_MASKmskw_XMMf64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UDQ_XMMu32_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VCVTTPD2UDQ (VCVTTPD2UDQ-256-1)
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTTPD2UDQ_XMMu32_MASKmskw_YMMf64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTTPD2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UDQ_XMMu32_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VCVTTPD2UQQ (VCVTTPD2UQQ-128-1)
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VCVTTPD2UQQ_XMMu64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UQQ_XMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPD2UQQ (VCVTTPD2UQQ-256-1)
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VCVTTPD2UQQ_YMMu64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UQQ_YMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPD2UQQ (VCVTTPD2UQQ-512-1)
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2UQQ_ZMMu64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64
+IFORM:       VCVTTPD2UQQ_ZMMu64_MASKmskw_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VCVTTPD2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VCVTTPD2UQQ_ZMMu64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VCVTTPS2DQ (VCVTTPS2DQ-128-1)
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2DQ_XMMi32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2DQ_XMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2DQ (VCVTTPS2DQ-256-1)
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5B VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2DQ_YMMi32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2DQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5B VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2DQ_YMMi32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2QQ (VCVTTPS2QQ-128-1)
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2QQ_XMMi64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2QQ_XMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2QQ (VCVTTPS2QQ-256-1)
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2QQ_YMMi64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2QQ_YMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2QQ (VCVTTPS2QQ-512-1)
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2QQ_ZMMi64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2QQ_ZMMi64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2QQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zi64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2QQ_ZMMi64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UDQ (VCVTTPS2UDQ-128-1)
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2UDQ_XMMu32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UDQ_XMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UDQ (VCVTTPS2UDQ-256-1)
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2UDQ_YMMu32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UDQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UDQ_YMMu32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UQQ (VCVTTPS2UQQ-128-1)
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2UQQ_XMMu64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UQQ_XMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UQQ (VCVTTPS2UQQ-256-1)
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VCVTTPS2UQQ_YMMu64_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UQQ_YMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTTPS2UQQ (VCVTTPS2UQQ-512-1)
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2UQQ_ZMMu64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x78 V66 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VCVTTPS2UQQ_ZMMu64_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VCVTTPS2UQQ
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x78 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VCVTTPS2UQQ_ZMMu64_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PD (VCVTUDQ2PD-128-1)
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VCVTUDQ2PD_XMMf64_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PD_XMMf64_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PD (VCVTUDQ2PD-256-1)
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VCVTUDQ2PD_YMMf64_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALF BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALF()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PD_YMMf64_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PS (VCVTUDQ2PS-128-1)
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VCVTUDQ2PS_XMMf32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PS_XMMf32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUDQ2PS (VCVTUDQ2PS-256-1)
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VCVTUDQ2PS_YMMf32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VCVTUDQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VCVTUDQ2PS_YMMf32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VCVTUQQ2PD (VCVTUQQ2PD-128-1)
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VCVTUQQ2PD_XMMf64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PD_XMMf64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VCVTUQQ2PD (VCVTUQQ2PD-256-1)
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VCVTUQQ2PD_YMMf64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PD_YMMf64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VCVTUQQ2PD (VCVTUQQ2PD-512-1)
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTUQQ2PD_ZMMf64_MASKmskw_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF3 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTUQQ2PD_ZMMf64_MASKmskw_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VCVTUQQ2PD
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PD_ZMMf64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VCVTUQQ2PS (VCVTUQQ2PS-128-1)
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VCVTUQQ2PS_XMMf32_MASKmskw_XMMu64_AVX512_VL128
+}
+
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PS_XMMf32_MASKmskw_MEMu64_AVX512_VL128
+}
+
+
+# EMITTING VCVTUQQ2PS (VCVTUQQ2PS-256-1)
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VCVTUQQ2PS_XMMf32_MASKmskw_YMMu64_AVX512_VL256
+}
+
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PS_XMMf32_MASKmskw_MEMu64_AVX512_VL256
+}
+
+
+# EMITTING VCVTUQQ2PS (VCVTUQQ2PS-512-1)
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTUQQ2PS_YMMf32_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x7A VF2 V0F MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() AVX512_ROUND()  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32:TXT=ROUNDC REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu64
+IFORM:       VCVTUQQ2PS_YMMf32_MASKmskw_ZMMu64_AVX512_VL512
+}
+
+{
+ICLASS:      VCVTUQQ2PS
+CPL:         3
+CATEGORY:    CONVERT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x7A VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VCVTUQQ2PS_YMMf32_MASKmskw_MEMu64_AVX512_VL512
+}
+
+
+# EMITTING VDBPSADBW (VDBPSADBW-128-1)
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x42 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8 IMM0:r:b
+IFORM:       VDBPSADBW_XMMu16_MASKmskw_XMMu8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x42 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+IFORM:       VDBPSADBW_XMMu16_MASKmskw_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VDBPSADBW (VDBPSADBW-256-1)
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x42 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8 IMM0:r:b
+IFORM:       VDBPSADBW_YMMu16_MASKmskw_YMMu8_YMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x42 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+IFORM:       VDBPSADBW_YMMu16_MASKmskw_YMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VDBPSADBW (VDBPSADBW-512-1)
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x42 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8 IMM0:r:b
+IFORM:       VDBPSADBW_ZMMu16_MASKmskw_ZMMu8_ZMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VDBPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x42 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8 IMM0:r:b
+IFORM:       VDBPSADBW_ZMMu16_MASKmskw_ZMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VDIVPD (VDIVPD-128-1)
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VDIVPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VDIVPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VDIVPD (VDIVPD-256-1)
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VDIVPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VDIVPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VDIVPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VDIVPS (VDIVPS-128-1)
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VDIVPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VDIVPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VDIVPS (VDIVPS-256-1)
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5E VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VDIVPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VDIVPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5E VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VDIVPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-128-1)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f64
+IFORM:       VEXPANDPD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-128-2)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VEXPANDPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-256-1)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f64
+IFORM:       VEXPANDPD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPD (VEXPANDPD-256-2)
+{
+ICLASS:      VEXPANDPD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VEXPANDPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-128-1)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32
+IFORM:       VEXPANDPS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-128-2)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VEXPANDPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-256-1)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x88 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32
+IFORM:       VEXPANDPS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VEXPANDPS (VEXPANDPS-256-2)
+{
+ICLASS:      VEXPANDPS
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x88 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VEXPANDPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X4 (VEXTRACTF32X4-256-1)
+{
+ICLASS:      VEXTRACTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f32 IMM0:r:b
+IFORM:       VEXTRACTF32X4_XMMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X4 (VEXTRACTF32X4-256-2)
+{
+ICLASS:      VEXTRACTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x19 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:dq:f32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f32 IMM0:r:b
+IFORM:       VEXTRACTF32X4_MEMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X8 (VEXTRACTF32X8-512-1)
+{
+ICLASS:      VEXTRACTF32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1B V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VEXTRACTF32X8_YMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF32X8 (VEXTRACTF32X8-512-2)
+{
+ICLASS:      VEXTRACTF32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x1B V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    MEM0:w:qq:f32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf32 IMM0:r:b
+IFORM:       VEXTRACTF32X8_MEMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X2 (VEXTRACTF64X2-256-1)
+{
+ICLASS:      VEXTRACTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f64 IMM0:r:b
+IFORM:       VEXTRACTF64X2_XMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X2 (VEXTRACTF64X2-256-2)
+{
+ICLASS:      VEXTRACTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x19 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:dq:f64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f64 IMM0:r:b
+IFORM:       VEXTRACTF64X2_MEMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X2 (VEXTRACTF64X2-512-1)
+{
+ICLASS:      VEXTRACTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x19 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zf64 IMM0:r:b
+IFORM:       VEXTRACTF64X2_XMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTF64X2 (VEXTRACTF64X2-512-2)
+{
+ICLASS:      VEXTRACTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x19 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:dq:f64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zf64 IMM0:r:b
+IFORM:       VEXTRACTF64X2_MEMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X4 (VEXTRACTI32X4-256-1)
+{
+ICLASS:      VEXTRACTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32 IMM0:r:b
+IFORM:       VEXTRACTI32X4_XMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X4 (VEXTRACTI32X4-256-2)
+{
+ICLASS:      VEXTRACTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x39 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32 IMM0:r:b
+IFORM:       VEXTRACTI32X4_MEMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X8 (VEXTRACTI32X8-512-1)
+{
+ICLASS:      VEXTRACTI32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu32 IMM0:r:b
+IFORM:       VEXTRACTI32X8_YMMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI32X8 (VEXTRACTI32X8-512-2)
+{
+ICLASS:      VEXTRACTI32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x3B V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu32 IMM0:r:b
+IFORM:       VEXTRACTI32X8_MEMu32_MASKmskw_ZMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X2 (VEXTRACTI64X2-256-1)
+{
+ICLASS:      VEXTRACTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64 IMM0:r:b
+IFORM:       VEXTRACTI64X2_XMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X2 (VEXTRACTI64X2-256-2)
+{
+ICLASS:      VEXTRACTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x39 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:dq:u64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64 IMM0:r:b
+IFORM:       VEXTRACTI64X2_MEMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X2 (VEXTRACTI64X2-512-1)
+{
+ICLASS:      VEXTRACTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu64 IMM0:r:b
+IFORM:       VEXTRACTI64X2_XMMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VEXTRACTI64X2 (VEXTRACTI64X2-512-2)
+{
+ICLASS:      VEXTRACTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x39 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    MEM0:w:dq:u64 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu64 IMM0:r:b
+IFORM:       VEXTRACTI64X2_MEMu64_MASKmskw_ZMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPD (VFIXUPIMMPD-128-1)
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VFIXUPIMMPD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPD (VFIXUPIMMPD-256-1)
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VFIXUPIMMPD_YMMf64_MASKmskw_YMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPD_YMMf64_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPS (VFIXUPIMMPS-128-1)
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VFIXUPIMMPS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VFIXUPIMMPS (VFIXUPIMMPS-256-1)
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x54 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VFIXUPIMMPS_YMMf32_MASKmskw_YMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFIXUPIMMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x54 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFIXUPIMMPS_YMMf32_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VFMADD132PD (VFMADD132PD-128-1)
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD132PD (VFMADD132PD-256-1)
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADD132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD132PS (VFMADD132PS-128-1)
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD132PS (VFMADD132PS-256-1)
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x98 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADD132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x98 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD213PD (VFMADD213PD-128-1)
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD213PD (VFMADD213PD-256-1)
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADD213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD213PS (VFMADD213PS-128-1)
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD213PS (VFMADD213PS-256-1)
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADD213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD231PD (VFMADD231PD-128-1)
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADD231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD231PD (VFMADD231PD-256-1)
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADD231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADD231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADD231PS (VFMADD231PS-128-1)
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADD231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADD231PS (VFMADD231PS-256-1)
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADD231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB8 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADD231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PD (VFMADDSUB132PD-128-1)
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADDSUB132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PD (VFMADDSUB132PD-256-1)
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADDSUB132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PS (VFMADDSUB132PS-128-1)
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADDSUB132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB132PS (VFMADDSUB132PS-256-1)
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x96 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADDSUB132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x96 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PD (VFMADDSUB213PD-128-1)
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADDSUB213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PD (VFMADDSUB213PD-256-1)
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADDSUB213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PS (VFMADDSUB213PS-128-1)
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADDSUB213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB213PS (VFMADDSUB213PS-256-1)
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADDSUB213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PD (VFMADDSUB231PD-128-1)
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMADDSUB231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PD (VFMADDSUB231PD-256-1)
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMADDSUB231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PS (VFMADDSUB231PS-128-1)
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMADDSUB231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMADDSUB231PS (VFMADDSUB231PS-256-1)
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMADDSUB231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMADDSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB6 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMADDSUB231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB132PD (VFMSUB132PD-128-1)
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB132PD (VFMSUB132PD-256-1)
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUB132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB132PS (VFMSUB132PS-128-1)
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB132PS (VFMSUB132PS-256-1)
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUB132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB213PD (VFMSUB213PD-128-1)
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB213PD (VFMSUB213PD-256-1)
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUB213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB213PS (VFMSUB213PS-128-1)
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB213PS (VFMSUB213PS-256-1)
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUB213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB231PD (VFMSUB231PD-128-1)
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUB231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB231PD (VFMSUB231PD-256-1)
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUB231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUB231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUB231PS (VFMSUB231PS-128-1)
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUB231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUB231PS (VFMSUB231PS-256-1)
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBA V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUB231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBA V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUB231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PD (VFMSUBADD132PD-128-1)
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUBADD132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PD (VFMSUBADD132PD-256-1)
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUBADD132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PS (VFMSUBADD132PS-128-1)
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUBADD132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD132PS (VFMSUBADD132PS-256-1)
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x97 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUBADD132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x97 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PD (VFMSUBADD213PD-128-1)
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUBADD213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PD (VFMSUBADD213PD-256-1)
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUBADD213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PS (VFMSUBADD213PS-128-1)
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUBADD213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD213PS (VFMSUBADD213PS-256-1)
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUBADD213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xA7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PD (VFMSUBADD231PD-128-1)
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFMSUBADD231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PD (VFMSUBADD231PD-256-1)
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFMSUBADD231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PS (VFMSUBADD231PS-128-1)
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFMSUBADD231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFMSUBADD231PS (VFMSUBADD231PS-256-1)
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFMSUBADD231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFMSUBADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xB7 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFMSUBADD231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD132PD (VFNMADD132PD-128-1)
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD132PD (VFNMADD132PD-256-1)
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMADD132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD132PS (VFNMADD132PS-128-1)
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD132PS (VFNMADD132PS-256-1)
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMADD132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD213PD (VFNMADD213PD-128-1)
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD213PD (VFNMADD213PD-256-1)
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMADD213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD213PS (VFNMADD213PS-128-1)
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD213PS (VFNMADD213PS-256-1)
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMADD213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD231PD (VFNMADD231PD-128-1)
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMADD231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD231PD (VFNMADD231PD-256-1)
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMADD231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMADD231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMADD231PS (VFNMADD231PS-128-1)
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMADD231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMADD231PS (VFNMADD231PS-256-1)
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMADD231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMADD231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMADD231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB132PD (VFNMSUB132PD-128-1)
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB132PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB132PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB132PD (VFNMSUB132PD-256-1)
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMSUB132PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB132PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB132PS (VFNMSUB132PS-128-1)
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB132PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB132PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB132PS (VFNMSUB132PS-256-1)
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x9E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMSUB132PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB132PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x9E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB132PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB213PD (VFNMSUB213PD-128-1)
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB213PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB213PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB213PD (VFNMSUB213PD-256-1)
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMSUB213PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB213PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB213PS (VFNMSUB213PS-128-1)
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB213PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB213PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB213PS (VFNMSUB213PS-256-1)
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xAE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMSUB213PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB213PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xAE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB213PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB231PD (VFNMSUB231PD-128-1)
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VFNMSUB231PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB231PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB231PD (VFNMSUB231PD-256-1)
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VFNMSUB231PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PD
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VFNMSUB231PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VFNMSUB231PS (VFNMSUB231PS-128-1)
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VFNMSUB231PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB231PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFNMSUB231PS (VFNMSUB231PS-256-1)
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0xBE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VFNMSUB231PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VFNMSUB231PS
+CPL:         3
+CATEGORY:    VFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0xBE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VFNMSUB231PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VFPCLASSPD (VFPCLASSPD-128-1)
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_MEMf64_IMM8_AVX512_VL128
+}
+
+
+# EMITTING VFPCLASSPD (VFPCLASSPD-256-1)
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_MEMf64_IMM8_AVX512_VL256
+}
+
+
+# EMITTING VFPCLASSPD (VFPCLASSPD-512-1)
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPD_MASKmskw_MASKmskw_MEMf64_IMM8_AVX512_VL512
+}
+
+
+# EMITTING VFPCLASSPS (VFPCLASSPS-128-1)
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_MEMf32_IMM8_AVX512_VL128
+}
+
+
+# EMITTING VFPCLASSPS (VFPCLASSPS-256-1)
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_MEMf32_IMM8_AVX512_VL256
+}
+
+
+# EMITTING VFPCLASSPS (VFPCLASSPS-512-1)
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x66 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VFPCLASSPS_MASKmskw_MASKmskw_MEMf32_IMM8_AVX512_VL512
+}
+
+
+# EMITTING VFPCLASSSD (VFPCLASSSD-128-1)
+{
+ICLASS:      VFPCLASSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x67 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VFPCLASSSD_MASKmskw_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x67 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1  NOEVSR  ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:q:f64 IMM0:r:b
+IFORM:       VFPCLASSSD_MASKmskw_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VFPCLASSSS (VFPCLASSSS-128-1)
+{
+ICLASS:      VFPCLASSSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x67 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0  NOEVSR  ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VFPCLASSSS_MASKmskw_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VFPCLASSSS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x67 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0  NOEVSR  ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw MEM0:r:d:f32 IMM0:r:b
+IFORM:       VFPCLASSSS_MASKmskw_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VGATHERDPD (VGATHERDPD-128-2)
+{
+ICLASS:      VGATHERDPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERDPD_XMMf64_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VGATHERDPD (VGATHERDPD-256-2)
+{
+ICLASS:      VGATHERDPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERDPD_YMMf64_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VGATHERDPS (VGATHERDPS-128-2)
+{
+ICLASS:      VGATHERDPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERDPS_XMMf32_MASKmskw_MEMf32_AVX512_VL128
+}
+
+
+# EMITTING VGATHERDPS (VGATHERDPS-256-2)
+{
+ICLASS:      VGATHERDPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x92 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERDPS_YMMf32_MASKmskw_MEMf32_AVX512_VL256
+}
+
+
+# EMITTING VGATHERQPD (VGATHERQPD-128-2)
+{
+ICLASS:      VGATHERQPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERQPD_XMMf64_MASKmskw_MEMf64_AVX512_VL128
+}
+
+
+# EMITTING VGATHERQPD (VGATHERQPD-256-2)
+{
+ICLASS:      VGATHERQPD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASKNOT0():rw:mskw MEM0:r:q:f64
+IFORM:       VGATHERQPD_YMMf64_MASKmskw_MEMf64_AVX512_VL256
+}
+
+
+# EMITTING VGATHERQPS (VGATHERQPS-128-2)
+{
+ICLASS:      VGATHERQPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERQPS_XMMf32_MASKmskw_MEMf32_AVX512_VL128
+}
+
+
+# EMITTING VGATHERQPS (VGATHERQPS-256-2)
+{
+ICLASS:      VGATHERQPS
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x93 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASKNOT0():rw:mskw MEM0:r:d:f32
+IFORM:       VGATHERQPS_XMMf32_MASKmskw_MEMf32_AVX512_VL256
+}
+
+
+# EMITTING VGETEXPPD (VGETEXPPD-128-1)
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VGETEXPPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VGETEXPPD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VGETEXPPD (VGETEXPPD-256-1)
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VGETEXPPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VGETEXPPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VGETEXPPD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VGETEXPPS (VGETEXPPS-128-1)
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VGETEXPPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VGETEXPPS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VGETEXPPS (VGETEXPPS-256-1)
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x42 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VGETEXPPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VGETEXPPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x42 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VGETEXPPS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VGETMANTPD (VGETMANTPD-128-1)
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VGETMANTPD_XMMf64_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPD_XMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTPD (VGETMANTPD-256-1)
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VGETMANTPD_YMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPD_YMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTPS (VGETMANTPS-128-1)
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VGETMANTPS_XMMf32_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPS_XMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VGETMANTPS (VGETMANTPS-256-1)
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x26 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VGETMANTPS_YMMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VGETMANTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x26 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGETMANTPS_YMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF32X4 (VINSERTF32X4-256-1)
+{
+ICLASS:      VINSERTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VINSERTF32X4_YMMf32_MASKmskw_YMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x18 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:dq:f32 IMM0:r:b
+IFORM:       VINSERTF32X4_YMMf32_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF32X8 (VINSERTF32X8-512-1)
+{
+ICLASS:      VINSERTF32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1A V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VINSERTF32X8_ZMMf32_MASKmskw_ZMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x1A V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:qq:f32 IMM0:r:b
+IFORM:       VINSERTF32X8_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF64X2 (VINSERTF64X2-256-1)
+{
+ICLASS:      VINSERTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VINSERTF64X2_YMMf64_MASKmskw_YMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x18 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:dq:f64 IMM0:r:b
+IFORM:       VINSERTF64X2_YMMf64_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTF64X2 (VINSERTF64X2-512-1)
+{
+ICLASS:      VINSERTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x18 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VINSERTF64X2_ZMMf64_MASKmskw_ZMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x18 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:dq:f64 IMM0:r:b
+IFORM:       VINSERTF64X2_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI32X4 (VINSERTI32X4-256-1)
+{
+ICLASS:      VINSERTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VINSERTI32X4_YMMu32_MASKmskw_YMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE4
+PATTERN:    EVV 0x38 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE4()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:dq:u32 IMM0:r:b
+IFORM:       VINSERTI32X4_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI32X8 (VINSERTI32X8-512-1)
+{
+ICLASS:      VINSERTI32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3A V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VINSERTI32X8_ZMMu32_MASKmskw_ZMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI32X8
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE8
+PATTERN:    EVV 0x3A V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_TUPLE8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:qq:u32 IMM0:r:b
+IFORM:       VINSERTI32X8_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI64X2 (VINSERTI64X2-256-1)
+{
+ICLASS:      VINSERTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VINSERTI64X2_YMMu64_MASKmskw_YMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x38 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       VINSERTI64X2_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VINSERTI64X2 (VINSERTI64X2-512-1)
+{
+ICLASS:      VINSERTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VINSERTI64X2_ZMMu64_MASKmskw_ZMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VINSERTI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_TUPLE2
+PATTERN:    EVV 0x38 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_TUPLE2()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       VINSERTI64X2_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VMAXPD (VMAXPD-128-1)
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMAXPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMAXPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMAXPD (VMAXPD-256-1)
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VMAXPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMAXPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMAXPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMAXPS (VMAXPS-128-1)
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMAXPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMAXPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMAXPS (VMAXPS-256-1)
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5F VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VMAXPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMAXPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5F VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMAXPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMINPD (VMINPD-128-1)
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMINPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMINPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMINPD (VMINPD-256-1)
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VMINPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMINPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMINPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMINPS (VMINPS-128-1)
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMINPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMINPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMINPS (VMINPS-256-1)
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5D VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VMINPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMINPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5D VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMINPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-128-1)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f64
+IFORM:       VMOVAPD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-128-2)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f64
+IFORM:       VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-128-3)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VMOVAPD_MEMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-256-1)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f64
+IFORM:       VMOVAPD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-256-2)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f64
+IFORM:       VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPD (VMOVAPD-256-3)
+{
+ICLASS:      VMOVAPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f64
+IFORM:       VMOVAPD_MEMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-128-1)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32
+IFORM:       VMOVAPS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-128-2)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f32
+IFORM:       VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-128-3)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VMOVAPS_MEMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-256-1)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x28 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32
+IFORM:       VMOVAPS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-256-2)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f32
+IFORM:       VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VMOVAPS (VMOVAPS-256-3)
+{
+ICLASS:      VMOVAPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x29 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f32
+IFORM:       VMOVAPS_MEMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VMOVDDUP (VMOVDDUP-128-1)
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VMOVDDUP_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MOVDDUP
+PATTERN:    EVV 0x12 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_MOVDDUP()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:f64
+IFORM:       VMOVDDUP_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVDDUP (VMOVDDUP-256-1)
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VMOVDDUP_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVDDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MOVDDUP
+PATTERN:    EVV 0x12 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_MOVDDUP()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f64
+IFORM:       VMOVDDUP_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-128-1)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u32
+IFORM:       VMOVDQA32_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-128-2)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-128-3)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VMOVDQA32_MEMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-256-1)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u32
+IFORM:       VMOVDQA32_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-256-2)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA32 (VMOVDQA32-256-3)
+{
+ICLASS:      VMOVDQA32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VMOVDQA32_MEMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-128-1)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u64
+IFORM:       VMOVDQA64_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-128-2)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-128-3)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VMOVDQA64_MEMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-256-1)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x6F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u64
+IFORM:       VMOVDQA64_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-256-2)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQA64 (VMOVDQA64-256-3)
+{
+ICLASS:      VMOVDQA64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x7F V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VMOVDQA64_MEMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-128-1)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16
+IFORM:       VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16
+IFORM:       VMOVDQU16_XMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-128-2)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u16
+IFORM:       VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-128-3)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u16
+IFORM:       VMOVDQU16_MEMu16_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-256-1)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16
+IFORM:       VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512
+}
+
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16
+IFORM:       VMOVDQU16_YMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-256-2)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u16
+IFORM:       VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-256-3)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u16
+IFORM:       VMOVDQU16_MEMu16_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-512-1)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16
+IFORM:       VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16
+IFORM:       VMOVDQU16_ZMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-512-2)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu16
+IFORM:       VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU16 (VMOVDQU16-512-3)
+{
+ICLASS:      VMOVDQU16
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu16
+IFORM:       VMOVDQU16_MEMu16_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-128-1)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u32
+IFORM:       VMOVDQU32_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-128-2)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-128-3)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VMOVDQU32_MEMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-256-1)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u32
+IFORM:       VMOVDQU32_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-256-2)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU32 (VMOVDQU32-256-3)
+{
+ICLASS:      VMOVDQU32
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VMOVDQU32_MEMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-128-1)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u64
+IFORM:       VMOVDQU64_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-128-2)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-128-3)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VMOVDQU64_MEMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-256-1)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u64
+IFORM:       VMOVDQU64_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-256-2)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU64 (VMOVDQU64-256-3)
+{
+ICLASS:      VMOVDQU64
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VMOVDQU64_MEMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-128-1)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8
+IFORM:       VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u8
+IFORM:       VMOVDQU8_XMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-128-2)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u8
+IFORM:       VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-128-3)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u8
+IFORM:       VMOVDQU8_MEMu8_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-256-1)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u8
+IFORM:       VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512
+}
+
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u8
+IFORM:       VMOVDQU8_YMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-256-2)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u8
+IFORM:       VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-256-3)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u8
+IFORM:       VMOVDQU8_MEMu8_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-512-1)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu8
+IFORM:       VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x6F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u8
+IFORM:       VMOVDQU8_ZMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-512-2)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu8
+IFORM:       VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VMOVDQU8 (VMOVDQU8-512-3)
+{
+ICLASS:      VMOVDQU8
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7F VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:zd:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu8
+IFORM:       VMOVDQU8_MEMu8_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VMOVNTDQ (VMOVNTDQ-128-1)
+{
+ICLASS:      VMOVNTDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0xE7 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:u32 REG0=XMM_R3():r:dq:u32
+IFORM:       VMOVNTDQ_MEMu32_XMMu32_AVX512
+}
+
+
+# EMITTING VMOVNTDQ (VMOVNTDQ-256-1)
+{
+ICLASS:      VMOVNTDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0xE7 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:u32 REG0=YMM_R3():r:qq:u32
+IFORM:       VMOVNTDQ_MEMu32_YMMu32_AVX512
+}
+
+
+# EMITTING VMOVNTDQA (VMOVNTDQA-128-1)
+{
+ICLASS:      VMOVNTDQA
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 MEM0:r:dq:u32
+IFORM:       VMOVNTDQA_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVNTDQA (VMOVNTDQA-256-1)
+{
+ICLASS:      VMOVNTDQA
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 MEM0:r:qq:u32
+IFORM:       VMOVNTDQA_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VMOVNTPD (VMOVNTPD-128-1)
+{
+ICLASS:      VMOVNTPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f64 REG0=XMM_R3():r:dq:f64
+IFORM:       VMOVNTPD_MEMf64_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVNTPD (VMOVNTPD-256-1)
+{
+ICLASS:      VMOVNTPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0 MASK=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f64 REG0=YMM_R3():r:qq:f64
+IFORM:       VMOVNTPD_MEMf64_YMMf64_AVX512
+}
+
+
+# EMITTING VMOVNTPS (VMOVNTPS-128-1)
+{
+ICLASS:      VMOVNTPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f32 REG0=XMM_R3():r:dq:f32
+IFORM:       VMOVNTPS_MEMf32_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVNTPS (VMOVNTPS-256-1)
+{
+ICLASS:      VMOVNTPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E1NF
+REAL_OPCODE: Y
+ATTRIBUTES:  NOTSX REQUIRES_ALIGNMENT DISP8_FULLMEM
+PATTERN:    EVV 0x2B VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0 MASK=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f32 REG0=YMM_R3():r:qq:f32
+IFORM:       VMOVNTPS_MEMf32_YMMf32_AVX512
+}
+
+
+# EMITTING VMOVSHDUP (VMOVSHDUP-128-1)
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VMOVSHDUP_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x16 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32
+IFORM:       VMOVSHDUP_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSHDUP (VMOVSHDUP-256-1)
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VMOVSHDUP_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSHDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x16 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32
+IFORM:       VMOVSHDUP_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSLDUP (VMOVSLDUP-128-1)
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VMOVSLDUP_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32
+IFORM:       VMOVSLDUP_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVSLDUP (VMOVSLDUP-256-1)
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VMOVSLDUP_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVSLDUP
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32
+IFORM:       VMOVSLDUP_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-128-1)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f64
+IFORM:       VMOVUPD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-128-2)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f64
+IFORM:       VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-128-3)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VMOVUPD_MEMf64_MASKmskw_XMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-256-1)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f64
+IFORM:       VMOVUPD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-256-2)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f64
+IFORM:       VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPD (VMOVUPD-256-3)
+{
+ICLASS:      VMOVUPD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f64
+IFORM:       VMOVUPD_MEMf64_MASKmskw_YMMf64_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-128-1)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:f32
+IFORM:       VMOVUPS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-128-2)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:f32
+IFORM:       VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-128-3)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:dq:f32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VMOVUPS_MEMf32_MASKmskw_XMMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-256-1)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:f32
+IFORM:       VMOVUPS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-256-2)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:f32
+IFORM:       VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VMOVUPS (VMOVUPS-256-3)
+{
+ICLASS:      VMOVUPS
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_FULLMEM()
+OPERANDS:    MEM0:w:qq:f32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:f32
+IFORM:       VMOVUPS_MEMf32_MASKmskw_YMMf32_AVX512
+}
+
+
+# EMITTING VMULPD (VMULPD-128-1)
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VMULPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMULPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMULPD (VMULPD-256-1)
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VMULPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VMULPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VMULPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VMULPS (VMULPS-128-1)
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VMULPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMULPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VMULPS (VMULPS-256-1)
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x59 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VMULPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VMULPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x59 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VMULPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VORPD (VORPD-128-1)
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VORPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VORPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VORPD (VORPD-256-1)
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VORPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VORPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VORPD (VORPD-512-1)
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VORPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VORPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VORPS (VORPS-128-1)
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VORPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VORPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VORPS (VORPS-256-1)
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VORPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VORPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VORPS (VORPS-512-1)
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x56 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VORPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x56 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VORPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPABSB (VPABSB-128-1)
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPABSB_XMMi8_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i8
+IFORM:       VPABSB_XMMi8_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPABSB (VPABSB-256-1)
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i8
+IFORM:       VPABSB_YMMi8_MASKmskw_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i8
+IFORM:       VPABSB_YMMi8_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPABSB (VPABSB-512-1)
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi8
+IFORM:       VPABSB_ZMMi8_MASKmskw_ZMMi8_AVX512
+}
+
+{
+ICLASS:      VPABSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:i8
+IFORM:       VPABSB_ZMMi8_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPABSD (VPABSD-128-1)
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VPABSD_XMMi32_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPABSD_XMMi32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPABSD (VPABSD-256-1)
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i32
+IFORM:       VPABSD_YMMi32_MASKmskw_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPABSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPABSD_YMMi32_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPABSQ (VPABSQ-128-1)
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i64
+IFORM:       VPABSQ_XMMi64_MASKmskw_XMMi64_AVX512
+}
+
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPABSQ_XMMi64_MASKmskw_MEMi64_AVX512
+}
+
+
+# EMITTING VPABSQ (VPABSQ-256-1)
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i64
+IFORM:       VPABSQ_YMMi64_MASKmskw_YMMi64_AVX512
+}
+
+{
+ICLASS:      VPABSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPABSQ_YMMi64_MASKmskw_MEMi64_AVX512
+}
+
+
+# EMITTING VPABSW (VPABSW-128-1)
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPABSW_XMMi16_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i16
+IFORM:       VPABSW_XMMi16_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPABSW (VPABSW-256-1)
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i16
+IFORM:       VPABSW_YMMi16_MASKmskw_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i16
+IFORM:       VPABSW_YMMi16_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPABSW (VPABSW-512-1)
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zi16
+IFORM:       VPABSW_ZMMi16_MASKmskw_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPABSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x1D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:i16
+IFORM:       VPABSW_ZMMi16_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPACKSSDW (VPACKSSDW-128-1)
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32
+IFORM:       VPACKSSDW_XMMi16_MASKmskw_XMMi32_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPACKSSDW_XMMi16_MASKmskw_XMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPACKSSDW (VPACKSSDW-256-1)
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32
+IFORM:       VPACKSSDW_YMMi16_MASKmskw_YMMi32_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPACKSSDW_YMMi16_MASKmskw_YMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPACKSSDW (VPACKSSDW-512-1)
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6B V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 REG3=ZMM_B3():r:zi32
+IFORM:       VPACKSSDW_ZMMi16_MASKmskw_ZMMi32_ZMMi32_AVX512
+}
+
+{
+ICLASS:      VPACKSSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6B V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPACKSSDW_ZMMi16_MASKmskw_ZMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPACKSSWB (VPACKSSWB-128-1)
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPACKSSWB_XMMi8_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x63 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPACKSSWB_XMMi8_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPACKSSWB (VPACKSSWB-256-1)
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPACKSSWB_YMMi8_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x63 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPACKSSWB_YMMi8_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPACKSSWB (VPACKSSWB-512-1)
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPACKSSWB_ZMMi8_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPACKSSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x63 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPACKSSWB_ZMMi8_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPACKUSDW (VPACKUSDW-128-1)
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x2B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPACKUSDW_XMMu16_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x2B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPACKUSDW_XMMu16_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPACKUSDW (VPACKUSDW-256-1)
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x2B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPACKUSDW_YMMu16_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x2B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPACKUSDW_YMMu16_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPACKUSDW (VPACKUSDW-512-1)
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x2B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPACKUSDW_ZMMu16_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPACKUSDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x2B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPACKUSDW_ZMMu16_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPACKUSWB (VPACKUSWB-128-1)
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x67 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPACKUSWB_XMMu8_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x67 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPACKUSWB_XMMu8_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPACKUSWB (VPACKUSWB-256-1)
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x67 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPACKUSWB_YMMu8_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x67 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPACKUSWB_YMMu8_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPACKUSWB (VPACKUSWB-512-1)
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x67 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPACKUSWB_ZMMu8_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPACKUSWB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x67 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPACKUSWB_ZMMu8_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDB (VPADDB-128-1)
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPADDB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPADDB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDB (VPADDB-256-1)
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPADDB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPADDB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDB (VPADDB-512-1)
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPADDB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPADDB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDD (VPADDD-128-1)
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPADDD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPADDD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPADDD (VPADDD-256-1)
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPADDD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPADDD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPADDD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPADDQ (VPADDQ-128-1)
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPADDQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xD4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPADDQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPADDQ (VPADDQ-256-1)
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPADDQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPADDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xD4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPADDQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPADDSB (VPADDSB-128-1)
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 REG3=XMM_B3():r:dq:i8
+IFORM:       VPADDSB_XMMi8_MASKmskw_XMMi8_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 MEM0:r:dq:i8
+IFORM:       VPADDSB_XMMi8_MASKmskw_XMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPADDSB (VPADDSB-256-1)
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 REG3=YMM_B3():r:qq:i8
+IFORM:       VPADDSB_YMMi8_MASKmskw_YMMi8_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 MEM0:r:qq:i8
+IFORM:       VPADDSB_YMMi8_MASKmskw_YMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPADDSB (VPADDSB-512-1)
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 REG3=ZMM_B3():r:zi8
+IFORM:       VPADDSB_ZMMi8_MASKmskw_ZMMi8_ZMMi8_AVX512
+}
+
+{
+ICLASS:      VPADDSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 MEM0:r:zd:i8
+IFORM:       VPADDSB_ZMMi8_MASKmskw_ZMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPADDSW (VPADDSW-128-1)
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xED V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPADDSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xED V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPADDSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPADDSW (VPADDSW-256-1)
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xED V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPADDSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xED V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPADDSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPADDSW (VPADDSW-512-1)
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xED V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPADDSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPADDSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xED V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPADDSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPADDUSB (VPADDUSB-128-1)
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPADDUSB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPADDUSB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDUSB (VPADDUSB-256-1)
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPADDUSB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPADDUSB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDUSB (VPADDUSB-512-1)
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDC V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPADDUSB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPADDUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPADDUSB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPADDUSW (VPADDUSW-128-1)
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPADDUSW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPADDUSW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDUSW (VPADDUSW-256-1)
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPADDUSW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPADDUSW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDUSW (VPADDUSW-512-1)
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPADDUSW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPADDUSW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDW (VPADDW-128-1)
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPADDW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPADDW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDW (VPADDW-256-1)
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPADDW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPADDW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPADDW (VPADDW-512-1)
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFD V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPADDW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPADDW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xFD V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPADDW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPALIGNR (VPALIGNR-128-1)
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128     UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8 IMM0:r:b
+IFORM:       VPALIGNR_XMMu8_MASKmskw_XMMu8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128     UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+IFORM:       VPALIGNR_XMMu8_MASKmskw_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPALIGNR (VPALIGNR-256-1)
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256     UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8 IMM0:r:b
+IFORM:       VPALIGNR_YMMu8_MASKmskw_YMMu8_YMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256     UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+IFORM:       VPALIGNR_YMMu8_MASKmskw_YMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPALIGNR (VPALIGNR-512-1)
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512     UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8 IMM0:r:b
+IFORM:       VPALIGNR_ZMMu8_MASKmskw_ZMMu8_ZMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPALIGNR
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512     UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8 IMM0:r:b
+IFORM:       VPALIGNR_ZMMu8_MASKmskw_ZMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPANDD (VPANDD-128-1)
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPANDD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDD (VPANDD-256-1)
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPANDD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDND (VPANDND-128-1)
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPANDND_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDND_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDND (VPANDND-256-1)
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPANDND_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPANDND
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPANDND_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPANDNQ (VPANDNQ-128-1)
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPANDNQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDNQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPANDNQ (VPANDNQ-256-1)
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPANDNQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDNQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDNQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPANDQ (VPANDQ-128-1)
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPANDQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPANDQ (VPANDQ-256-1)
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPANDQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPANDQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xDB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPANDQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPAVGB (VPAVGB-128-1)
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE0 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPAVGB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE0 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPAVGB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPAVGB (VPAVGB-256-1)
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE0 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPAVGB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE0 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPAVGB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPAVGB (VPAVGB-512-1)
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE0 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPAVGB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPAVGB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE0 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPAVGB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPAVGW (VPAVGW-128-1)
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPAVGW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPAVGW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPAVGW (VPAVGW-256-1)
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPAVGW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPAVGW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPAVGW (VPAVGW-512-1)
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPAVGW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPAVGW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPAVGW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPBLENDMB (VPBLENDMB-128-1)
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPBLENDMB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPBLENDMB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPBLENDMB (VPBLENDMB-256-1)
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPBLENDMB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPBLENDMB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPBLENDMB (VPBLENDMB-512-1)
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPBLENDMB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPBLENDMB
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPBLENDMB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPBLENDMD (VPBLENDMD-128-1)
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPBLENDMD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPBLENDMD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPBLENDMD (VPBLENDMD-256-1)
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPBLENDMD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPBLENDMD
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPBLENDMD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPBLENDMQ (VPBLENDMQ-128-1)
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPBLENDMQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPBLENDMQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPBLENDMQ (VPBLENDMQ-256-1)
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPBLENDMQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPBLENDMQ
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED MASK_AS_CONTROL
+PATTERN:    EVV 0x64 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPBLENDMQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPBLENDMW (VPBLENDMW-128-1)
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPBLENDMW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPBLENDMW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPBLENDMW (VPBLENDMW-256-1)
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPBLENDMW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPBLENDMW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPBLENDMW (VPBLENDMW-512-1)
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPBLENDMW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPBLENDMW
+CPL:         3
+CATEGORY:    BLEND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM MASK_AS_CONTROL
+PATTERN:    EVV 0x66 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPBLENDMW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-128-1)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x78 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8 EMX_BROADCAST_1TO16_8
+IFORM:       VPBROADCASTB_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_BYTE
+PATTERN:    EVV 0x78 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_8_BITS() NELEM_TUPLE1_BYTE()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:b:u8 EMX_BROADCAST_1TO16_8
+IFORM:       VPBROADCASTB_XMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-128-2)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u8 EMX_BROADCAST_1TO16_8
+IFORM:       VPBROADCASTB_XMMu8_MASKmskw_GPR32u8_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-256-1)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x78 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8 EMX_BROADCAST_1TO32_8
+IFORM:       VPBROADCASTB_YMMu8_MASKmskw_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_BYTE
+PATTERN:    EVV 0x78 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_8_BITS() NELEM_TUPLE1_BYTE()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:b:u8 EMX_BROADCAST_1TO32_8
+IFORM:       VPBROADCASTB_YMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-256-2)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u8 EMX_BROADCAST_1TO32_8
+IFORM:       VPBROADCASTB_YMMu8_MASKmskw_GPR32u8_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-512-1)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x78 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8 EMX_BROADCAST_1TO64_8
+IFORM:       VPBROADCASTB_ZMMu8_MASKmskw_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_BYTE
+PATTERN:    EVV 0x78 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_8_BITS() NELEM_TUPLE1_BYTE()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:b:u8 EMX_BROADCAST_1TO64_8
+IFORM:       VPBROADCASTB_ZMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPBROADCASTB (VPBROADCASTB-512-2)
+{
+ICLASS:      VPBROADCASTB
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u8 EMX_BROADCAST_1TO64_8
+IFORM:       VPBROADCASTB_ZMMu8_MASKmskw_GPR32u8_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-128-1)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x58 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:u32 EMX_BROADCAST_1TO4_32
+IFORM:       VPBROADCASTD_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-128-2)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x58 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_1TO4_32
+IFORM:       VPBROADCASTD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-128-3)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u32 EMX_BROADCAST_1TO4_32
+IFORM:       VPBROADCASTD_XMMu32_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-256-1)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x58 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:u32 EMX_BROADCAST_1TO8_32
+IFORM:       VPBROADCASTD_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-256-2)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x58 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 EMX_BROADCAST_1TO8_32
+IFORM:       VPBROADCASTD_YMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTD (VPBROADCASTD-256-3)
+{
+ICLASS:      VPBROADCASTD
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u32 EMX_BROADCAST_1TO8_32
+IFORM:       VPBROADCASTD_YMMu32_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING VPBROADCASTMB2Q (VPBROADCASTMB2Q-128-1)
+{
+ICLASS:      VPBROADCASTMB2Q
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x2A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK_B():r:mskw:u64 EMX_BROADCAST_1TO2_8
+IFORM:       VPBROADCASTMB2Q_XMMu64_MASKu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTMB2Q (VPBROADCASTMB2Q-256-1)
+{
+ICLASS:      VPBROADCASTMB2Q
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x2A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK_B():r:mskw:u64 EMX_BROADCAST_1TO4_8
+IFORM:       VPBROADCASTMB2Q_YMMu64_MASKu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTMW2D (VPBROADCASTMW2D-128-1)
+{
+ICLASS:      VPBROADCASTMW2D
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x3A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK_B():r:mskw:u32 EMX_BROADCAST_1TO4_16
+IFORM:       VPBROADCASTMW2D_XMMu32_MASKu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTMW2D (VPBROADCASTMW2D-256-1)
+{
+ICLASS:      VPBROADCASTMW2D
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x3A VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK_B():r:mskw:u32 EMX_BROADCAST_1TO8_16
+IFORM:       VPBROADCASTMW2D_YMMu32_MASKu32_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-128-1)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u64 EMX_BROADCAST_1TO2_64
+IFORM:       VPBROADCASTQ_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-128-2)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 EMX_BROADCAST_1TO2_64
+IFORM:       VPBROADCASTQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-128-3)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  mode64  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR64_B():r:q:u64 EMX_BROADCAST_1TO2_64
+IFORM:       VPBROADCASTQ_XMMu64_MASKmskw_GPR64u64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-256-1)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1
+PATTERN:    EVV 0x59 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_TUPLE1()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:u64 EMX_BROADCAST_1TO4_64
+IFORM:       VPBROADCASTQ_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-256-2)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x59 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 EMX_BROADCAST_1TO4_64
+IFORM:       VPBROADCASTQ_YMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPBROADCASTQ (VPBROADCASTQ-256-3)
+{
+ICLASS:      VPBROADCASTQ
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  mode64  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR64_B():r:q:u64 EMX_BROADCAST_1TO4_64
+IFORM:       VPBROADCASTQ_YMMu64_MASKmskw_GPR64u64_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-128-1)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x79 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 EMX_BROADCAST_1TO8_16
+IFORM:       VPBROADCASTW_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_WORD
+PATTERN:    EVV 0x79 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_16_BITS() NELEM_TUPLE1_WORD()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:wrd:u16 EMX_BROADCAST_1TO8_16
+IFORM:       VPBROADCASTW_XMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-128-2)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u16 EMX_BROADCAST_1TO8_16
+IFORM:       VPBROADCASTW_XMMu16_MASKmskw_GPR32u16_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-256-1)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x79 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 EMX_BROADCAST_1TO16_16
+IFORM:       VPBROADCASTW_YMMu16_MASKmskw_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_WORD
+PATTERN:    EVV 0x79 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_16_BITS() NELEM_TUPLE1_WORD()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:wrd:u16 EMX_BROADCAST_1TO16_16
+IFORM:       VPBROADCASTW_YMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-256-2)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u16 EMX_BROADCAST_1TO16_16
+IFORM:       VPBROADCASTW_YMMu16_MASKmskw_GPR32u16_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-512-1)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x79 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 EMX_BROADCAST_1TO32_16
+IFORM:       VPBROADCASTW_ZMMu16_MASKmskw_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_TUPLE1_WORD
+PATTERN:    EVV 0x79 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_16_BITS() NELEM_TUPLE1_WORD()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:wrd:u16 EMX_BROADCAST_1TO32_16
+IFORM:       VPBROADCASTW_ZMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPBROADCASTW (VPBROADCASTW-512-2)
+{
+ICLASS:      VPBROADCASTW
+CPL:         3
+CATEGORY:    BROADCAST
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=GPR32_B():r:d:u16 EMX_BROADCAST_1TO32_16
+IFORM:       VPBROADCASTW_ZMMu16_MASKmskw_GPR32u16_AVX512
+}
+
+
+# EMITTING VPCMPB (VPCMPB-128-1)
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i8 REG3=XMM_B3():r:dq:i8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_XMMi8_XMMi8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i8 MEM0:r:dq:i8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_XMMi8_MEMi8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPB (VPCMPB-256-1)
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i8 REG3=YMM_B3():r:qq:i8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_YMMi8_YMMi8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i8 MEM0:r:qq:i8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_YMMi8_MEMi8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPB (VPCMPB-512-1)
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi8 REG3=ZMM_B3():r:zi8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_ZMMi8_ZMMi8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi8 MEM0:r:zd:i8 IMM0:r:b
+IFORM:       VPCMPB_MASKmskw_MASKmskw_ZMMi8_MEMi8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPD (VPCMPD-128-1)
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32 IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_XMMi32_XMMi32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_XMMi32_MEMi32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPD (VPCMPD-256-1)
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32 IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_YMMi32_YMMi32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPD_MASKmskw_MASKmskw_YMMi32_MEMi32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPEQB (VPCMPEQB-128-1)
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x74 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x74 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPEQB (VPCMPEQB-256-1)
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x74 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x74 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPEQB (VPCMPEQB-512-1)
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x74 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPEQB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x74 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPCMPEQB_MASKmskw_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPEQD (VPCMPEQD-128-1)
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPCMPEQD (VPCMPEQD-256-1)
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPCMPEQD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCMPEQD_MASKmskw_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPCMPEQQ (VPCMPEQQ-128-1)
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x29 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPCMPEQQ (VPCMPEQQ-256-1)
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x29 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPCMPEQQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x29 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCMPEQQ_MASKmskw_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPCMPEQW (VPCMPEQW-128-1)
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPEQW (VPCMPEQW-256-1)
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPEQW (VPCMPEQW-512-1)
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPEQW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPCMPEQW_MASKmskw_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPGTB (VPCMPGTB-128-1)
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x64 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x64 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPGTB (VPCMPGTB-256-1)
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x64 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x64 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPGTB (VPCMPGTB-512-1)
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x64 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPCMPGTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x64 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPCMPGTB_MASKmskw_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPCMPGTD (VPCMPGTD-128-1)
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x66 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_XMMi32_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_XMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPCMPGTD (VPCMPGTD-256-1)
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x66 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_YMMi32_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPCMPGTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x66 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPCMPGTD_MASKmskw_MASKmskw_YMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPCMPGTQ (VPCMPGTQ-128-1)
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x37 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i64 REG3=XMM_B3():r:dq:i64
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_XMMi64_XMMi64_AVX512
+}
+
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x37 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_XMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPCMPGTQ (VPCMPGTQ-256-1)
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x37 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i64 REG3=YMM_B3():r:qq:i64
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_YMMi64_YMMi64_AVX512
+}
+
+{
+ICLASS:      VPCMPGTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x37 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPCMPGTQ_MASKmskw_MASKmskw_YMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPCMPGTW (VPCMPGTW-128-1)
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x65 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x65 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPGTW (VPCMPGTW-256-1)
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x65 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x65 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPGTW (VPCMPGTW-512-1)
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x65 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPCMPGTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x65 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPCMPGTW_MASKmskw_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPCMPQ (VPCMPQ-128-1)
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i64 REG3=XMM_B3():r:dq:i64 IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_XMMi64_XMMi64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i64 MEM0:r:vv:i64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_XMMi64_MEMi64_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPQ (VPCMPQ-256-1)
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i64 REG3=YMM_B3():r:qq:i64 IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_YMMi64_YMMi64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i64 MEM0:r:vv:i64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPQ_MASKmskw_MASKmskw_YMMi64_MEMi64_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUB (VPCMPUB-128-1)
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_XMMu8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUB (VPCMPUB-256-1)
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_YMMu8_YMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_YMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUB (VPCMPUB-512-1)
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_ZMMu8_ZMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ZEROING=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8 IMM0:r:b
+IFORM:       VPCMPUB_MASKmskw_MASKmskw_ZMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUD (VPCMPUD-128-1)
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_XMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUD (VPCMPUD-256-1)
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0 UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUD_MASKmskw_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUQ (VPCMPUQ-128-1)
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUQ (VPCMPUQ-256-1)
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x1E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x1E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0 UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPCMPUQ_MASKmskw_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUW (VPCMPUW-128-1)
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_XMMu16_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_XMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUW (VPCMPUW-256-1)
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_YMMu16_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_YMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPUW (VPCMPUW-512-1)
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_ZMMu16_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPCMPUW_MASKmskw_MASKmskw_ZMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPW (VPCMPW-128-1)
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_XMMi16_XMMi16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_XMMi16_MEMi16_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPW (VPCMPW-256-1)
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_YMMi16_YMMi16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_YMMi16_MEMi16_IMM8_AVX512
+}
+
+
+# EMITTING VPCMPW (VPCMPW-512-1)
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0 UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_ZMMi16_ZMMi16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCMPW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3F V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ZEROING=0 UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16 IMM0:r:b
+IFORM:       VPCMPW_MASKmskw_MASKmskw_ZMMi16_MEMi16_IMM8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-128-1)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPCOMPRESSD_MEMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-128-2)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VPCOMPRESSD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-256-1)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPCOMPRESSD_MEMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSD (VPCOMPRESSD-256-2)
+{
+ICLASS:      VPCOMPRESSD
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VPCOMPRESSD_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-128-1)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:u64 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPCOMPRESSQ_MEMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-128-2)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPCOMPRESSQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-256-1)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x8B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:u64 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPCOMPRESSQ_MEMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPCOMPRESSQ (VPCOMPRESSQ-256-2)
+{
+ICLASS:      VPCOMPRESSQ
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPCOMPRESSQ_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPCONFLICTD (VPCONFLICTD-128-1)
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VPCONFLICTD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCONFLICTD_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPCONFLICTD (VPCONFLICTD-256-1)
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VPCONFLICTD_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPCONFLICTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPCONFLICTD_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPCONFLICTQ (VPCONFLICTQ-128-1)
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VPCONFLICTQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCONFLICTQ_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPCONFLICTQ (VPCONFLICTQ-256-1)
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VPCONFLICTQ_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPCONFLICTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPCONFLICTQ_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMD (VPERMD-256-1)
+{
+ICLASS:      VPERMD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x36 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPERMD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x36 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMI2D (VPERMI2D-128-1)
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPERMI2D_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMI2D_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMI2D (VPERMI2D-256-1)
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPERMI2D_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMI2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMI2D_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMI2PD (VPERMI2PD-128-1)
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VPERMI2PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMI2PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMI2PD (VPERMI2PD-256-1)
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VPERMI2PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMI2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMI2PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMI2PS (VPERMI2PS-128-1)
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VPERMI2PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMI2PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMI2PS (VPERMI2PS-256-1)
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x77 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VPERMI2PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMI2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x77 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMI2PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMI2Q (VPERMI2Q-128-1)
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPERMI2Q_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMI2Q_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMI2Q (VPERMI2Q-256-1)
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x76 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPERMI2Q_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMI2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x76 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMI2Q_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMI2W (VPERMI2W-128-1)
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPERMI2W_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPERMI2W_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMI2W (VPERMI2W-256-1)
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPERMI2W_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPERMI2W_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMI2W (VPERMI2W-512-1)
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPERMI2W_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMI2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPERMI2W_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-128-1)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x05 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VPERMILPD_XMMf64_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x05 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPD_XMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-128-2)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VPERMILPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMILPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-256-1)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x05 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VPERMILPD_YMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x05 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPD_YMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPD (VPERMILPD-256-2)
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VPERMILPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMILPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMILPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-128-1)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VPERMILPS_XMMf32_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x04 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPS_XMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-128-2)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VPERMILPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMILPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-256-1)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VPERMILPS_YMMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x04 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMILPS_YMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VPERMILPS (VPERMILPS-256-2)
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VPERMILPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMILPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x0C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMILPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMPD (VPERMPD-256-1)
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x01 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VPERMPD_YMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x01 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMPD_YMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMPD (VPERMPD-256-2)
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VPERMPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x16 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMPS (VPERMPS-256-1)
+{
+ICLASS:      VPERMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x16 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VPERMPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x16 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMQ (VPERMQ-256-1)
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x00 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPERMQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x00 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPERMQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPERMQ (VPERMQ-256-2)
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x36 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPERMQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x36 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMT2D (VPERMT2D-128-1)
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPERMT2D_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMT2D_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMT2D (VPERMT2D-256-1)
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPERMT2D_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPERMT2D
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPERMT2D_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPERMT2PD (VPERMT2PD-128-1)
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VPERMT2PD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMT2PD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMT2PD (VPERMT2PD-256-1)
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VPERMT2PD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VPERMT2PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VPERMT2PD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VPERMT2PS (VPERMT2PS-128-1)
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VPERMT2PS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMT2PS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMT2PS (VPERMT2PS-256-1)
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VPERMT2PS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VPERMT2PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VPERMT2PS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VPERMT2Q (VPERMT2Q-128-1)
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPERMT2Q_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMT2Q_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMT2Q (VPERMT2Q-256-1)
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPERMT2Q_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPERMT2Q
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x7E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPERMT2Q_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPERMT2W (VPERMT2W-128-1)
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPERMT2W_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPERMT2W_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMT2W (VPERMT2W-256-1)
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPERMT2W_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPERMT2W_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMT2W (VPERMT2W-512-1)
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPERMT2W_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMT2W
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPERMT2W_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMW (VPERMW-128-1)
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPERMW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPERMW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMW (VPERMW-256-1)
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPERMW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPERMW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPERMW (VPERMW-512-1)
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPERMW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPERMW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPERMW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-128-1)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u32
+IFORM:       VPEXPANDD_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-128-2)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VPEXPANDD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-256-1)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u32
+IFORM:       VPEXPANDD_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDD (VPEXPANDD-256-2)
+{
+ICLASS:      VPEXPANDD
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VPEXPANDD_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-128-1)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u64
+IFORM:       VPEXPANDQ_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-128-2)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VPEXPANDQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-256-1)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x89 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u64
+IFORM:       VPEXPANDQ_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPEXPANDQ (VPEXPANDQ-256-2)
+{
+ICLASS:      VPEXPANDQ
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x89 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VPEXPANDQ_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPEXTRB (VPEXTRB-128-1)
+{
+ICLASS:      VPEXTRB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x14 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR32_B():w:d:u8 REG1=XMM_R3():r:dq:u8 IMM0:r:b
+IFORM:       VPEXTRB_GPR32u8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPEXTRB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE_BYTE
+PATTERN:    EVV 0x14 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_GPR_WRITER_STORE_BYTE()
+OPERANDS:    MEM0:w:b:u8 REG0=XMM_R3():r:dq:u8 IMM0:r:b
+IFORM:       VPEXTRB_MEMu8_XMMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPEXTRD (VPEXTRD-128-1)
+{
+ICLASS:      VPEXTRD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x16 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR32_B():w:d:u32 REG1=XMM_R3():r:dq:u32 IMM0:r:b
+IFORM:       VPEXTRD_GPR32u32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPEXTRD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE
+PATTERN:    EVV 0x16 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0 MASK=0 UIMM8()  ESIZE_32_BITS() NELEM_GPR_WRITER_STORE()
+OPERANDS:    MEM0:w:d:u32 REG0=XMM_R3():r:dq:u32 IMM0:r:b
+IFORM:       VPEXTRD_MEMu32_XMMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPEXTRQ (VPEXTRQ-128-1)
+{
+ICLASS:      VPEXTRQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x16 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR64_B():w:q:u64 REG1=XMM_R3():r:dq:u64 IMM0:r:b
+IFORM:       VPEXTRQ_GPR64u64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPEXTRQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE
+PATTERN:    EVV 0x16 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  mode64  NOEVSR  ZEROING=0 MASK=0 UIMM8()  ESIZE_64_BITS() NELEM_GPR_WRITER_STORE()
+OPERANDS:    MEM0:w:q:u64 REG0=XMM_R3():r:dq:u64 IMM0:r:b
+IFORM:       VPEXTRQ_MEMu64_XMMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPEXTRW (VPEXTRW-128-1)
+{
+ICLASS:      VPEXTRW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x15 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR32_B():w:d:u16 REG1=XMM_R3():r:dq:u16 IMM0:r:b
+IFORM:       VPEXTRW_GPR32u16_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPEXTRW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_WRITER_STORE_WORD
+PATTERN:    EVV 0x15 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()  ESIZE_16_BITS() NELEM_GPR_WRITER_STORE_WORD()
+OPERANDS:    MEM0:w:wrd:u16 REG0=XMM_R3():r:dq:u16 IMM0:r:b
+IFORM:       VPEXTRW_MEMu16_XMMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPEXTRW (VPEXTRW-128-2)
+{
+ICLASS:      VPEXTRW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xC5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR  ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=GPR32_R():w:d:u16 REG1=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPEXTRW_GPR32u16_XMMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPGATHERDD (VPGATHERDD-128-2)
+{
+ICLASS:      VPGATHERDD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERDD_XMMu32_MASKmskw_MEMu32_AVX512_VL128
+}
+
+
+# EMITTING VPGATHERDD (VPGATHERDD-256-2)
+{
+ICLASS:      VPGATHERDD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERDD_YMMu32_MASKmskw_MEMu32_AVX512_VL256
+}
+
+
+# EMITTING VPGATHERDQ (VPGATHERDQ-128-2)
+{
+ICLASS:      VPGATHERDQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERDQ_XMMu64_MASKmskw_MEMu64_AVX512_VL128
+}
+
+
+# EMITTING VPGATHERDQ (VPGATHERDQ-256-2)
+{
+ICLASS:      VPGATHERDQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES GATHER DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x90 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERDQ_YMMu64_MASKmskw_MEMu64_AVX512_VL256
+}
+
+
+# EMITTING VPGATHERQD (VPGATHERQD-128-2)
+{
+ICLASS:      VPGATHERQD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERQD_XMMu32_MASKmskw_MEMu32_AVX512_VL128
+}
+
+
+# EMITTING VPGATHERQD (VPGATHERQD-256-2)
+{
+ICLASS:      VPGATHERQD
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASKNOT0():rw:mskw MEM0:r:d:u32
+IFORM:       VPGATHERQD_XMMu32_MASKmskw_MEMu32_AVX512_VL256
+}
+
+
+# EMITTING VPGATHERQQ (VPGATHERQQ-128-2)
+{
+ICLASS:      VPGATHERQQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERQQ_XMMu64_MASKmskw_MEMu64_AVX512_VL128
+}
+
+
+# EMITTING VPGATHERQQ (VPGATHERQQ-256-2)
+{
+ICLASS:      VPGATHERQQ
+CPL:         3
+CATEGORY:    GATHER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  GATHER QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED
+PATTERN:    EVV 0x91 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASKNOT0():rw:mskw MEM0:r:q:u64
+IFORM:       VPGATHERQQ_YMMu64_MASKmskw_MEMu64_AVX512_VL256
+}
+
+
+# EMITTING VPINSRB (VPINSRB-128-1)
+{
+ICLASS:      VPINSRB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x20 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=XMM_N3():r:dq:u8 REG2=GPR32_B():r:d:u8 IMM0:r:b
+IFORM:       VPINSRB_XMMu8_XMMu8_GPR32u8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPINSRB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER_BYTE
+PATTERN:    EVV 0x20 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_GPR_READER_BYTE()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=XMM_N3():r:dq:u8 MEM0:r:b:u8 IMM0:r:b
+IFORM:       VPINSRB_XMMu8_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPINSRD (VPINSRD-128-1)
+{
+ICLASS:      VPINSRD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x22 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=XMM_N3():r:dq:u32 REG2=GPR32_B():r:d:u32 IMM0:r:b
+IFORM:       VPINSRD_XMMu32_XMMu32_GPR32u32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPINSRD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER
+PATTERN:    EVV 0x22 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0 MASK=0 UIMM8()  ESIZE_32_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=XMM_N3():r:dq:u32 MEM0:r:d:u32 IMM0:r:b
+IFORM:       VPINSRD_XMMu32_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPINSRQ (VPINSRQ-128-1)
+{
+ICLASS:      VPINSRQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x22 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  mode64    ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=XMM_N3():r:dq:u64 REG2=GPR64_B():r:q:u64 IMM0:r:b
+IFORM:       VPINSRQ_XMMu64_XMMu64_GPR64u64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPINSRQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER
+PATTERN:    EVV 0x22 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  mode64    ZEROING=0 MASK=0 UIMM8()  ESIZE_64_BITS() NELEM_GPR_READER()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=XMM_N3():r:dq:u64 MEM0:r:q:u64 IMM0:r:b
+IFORM:       VPINSRQ_XMMu64_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPINSRW (VPINSRW-128-1)
+{
+ICLASS:      VPINSRW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xC4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=XMM_N3():r:dq:u16 REG2=GPR32_B():r:d:u16 IMM0:r:b
+IFORM:       VPINSRW_XMMu16_XMMu16_GPR32u16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPINSRW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128N
+EXCEPTIONS:     AVX512-E9NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_GPR_READER_WORD
+PATTERN:    EVV 0xC4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0 UIMM8()  ESIZE_16_BITS() NELEM_GPR_READER_WORD()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=XMM_N3():r:dq:u16 MEM0:r:wrd:u16 IMM0:r:b
+IFORM:       VPINSRW_XMMu16_XMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPLZCNTD (VPLZCNTD-128-1)
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VPLZCNTD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPLZCNTD_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPLZCNTD (VPLZCNTD-256-1)
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VPLZCNTD_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPLZCNTD
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPLZCNTD_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPLZCNTQ (VPLZCNTQ-128-1)
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VPLZCNTQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPLZCNTQ_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPLZCNTQ (VPLZCNTQ-256-1)
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x44 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VPLZCNTQ_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPLZCNTQ
+CPL:         3
+CATEGORY:    CONFLICT
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512CD_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x44 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPLZCNTQ_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADDUBSW (VPMADDUBSW-128-1)
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPMADDUBSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x04 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPMADDUBSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMADDUBSW (VPMADDUBSW-256-1)
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPMADDUBSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x04 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPMADDUBSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMADDUBSW (VPMADDUBSW-512-1)
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x04 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPMADDUBSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x04 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPMADDUBSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMADDWD (VPMADDWD-128-1)
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPMADDWD_XMMi32_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPMADDWD_XMMi32_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMADDWD (VPMADDWD-256-1)
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPMADDWD_YMMi32_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPMADDWD_YMMi32_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMADDWD (VPMADDWD-512-1)
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPMADDWD_ZMMi32_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPMADDWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPMADDWD_ZMMi32_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMAXSB (VPMAXSB-128-1)
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 REG3=XMM_B3():r:dq:i8
+IFORM:       VPMAXSB_XMMi8_MASKmskw_XMMi8_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 MEM0:r:dq:i8
+IFORM:       VPMAXSB_XMMi8_MASKmskw_XMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMAXSB (VPMAXSB-256-1)
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 REG3=YMM_B3():r:qq:i8
+IFORM:       VPMAXSB_YMMi8_MASKmskw_YMMi8_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 MEM0:r:qq:i8
+IFORM:       VPMAXSB_YMMi8_MASKmskw_YMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMAXSB (VPMAXSB-512-1)
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 REG3=ZMM_B3():r:zi8
+IFORM:       VPMAXSB_ZMMi8_MASKmskw_ZMMi8_ZMMi8_AVX512
+}
+
+{
+ICLASS:      VPMAXSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 MEM0:r:zd:i8
+IFORM:       VPMAXSB_ZMMi8_MASKmskw_ZMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMAXSD (VPMAXSD-128-1)
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32
+IFORM:       VPMAXSD_XMMi32_MASKmskw_XMMi32_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMAXSD_XMMi32_MASKmskw_XMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMAXSD (VPMAXSD-256-1)
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32
+IFORM:       VPMAXSD_YMMi32_MASKmskw_YMMi32_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPMAXSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMAXSD_YMMi32_MASKmskw_YMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMAXSQ (VPMAXSQ-128-1)
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i64 REG3=XMM_B3():r:dq:i64
+IFORM:       VPMAXSQ_XMMi64_MASKmskw_XMMi64_XMMi64_AVX512
+}
+
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMAXSQ_XMMi64_MASKmskw_XMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMAXSQ (VPMAXSQ-256-1)
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i64 REG3=YMM_B3():r:qq:i64
+IFORM:       VPMAXSQ_YMMi64_MASKmskw_YMMi64_YMMi64_AVX512
+}
+
+{
+ICLASS:      VPMAXSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMAXSQ_YMMi64_MASKmskw_YMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMAXSW (VPMAXSW-128-1)
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPMAXSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPMAXSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMAXSW (VPMAXSW-256-1)
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPMAXSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPMAXSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMAXSW (VPMAXSW-512-1)
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPMAXSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPMAXSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPMAXSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMAXUB (VPMAXUB-128-1)
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPMAXUB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPMAXUB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMAXUB (VPMAXUB-256-1)
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPMAXUB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPMAXUB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMAXUB (VPMAXUB-512-1)
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDE V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPMAXUB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPMAXUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPMAXUB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMAXUD (VPMAXUD-128-1)
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPMAXUD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMAXUD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMAXUD (VPMAXUD-256-1)
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPMAXUD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPMAXUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMAXUD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMAXUQ (VPMAXUQ-128-1)
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMAXUQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMAXUQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMAXUQ (VPMAXUQ-256-1)
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMAXUQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMAXUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMAXUQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMAXUW (VPMAXUW-128-1)
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPMAXUW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPMAXUW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMAXUW (VPMAXUW-256-1)
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPMAXUW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPMAXUW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMAXUW (VPMAXUW-512-1)
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPMAXUW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPMAXUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPMAXUW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMINSB (VPMINSB-128-1)
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 REG3=XMM_B3():r:dq:i8
+IFORM:       VPMINSB_XMMi8_MASKmskw_XMMi8_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x38 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 MEM0:r:dq:i8
+IFORM:       VPMINSB_XMMi8_MASKmskw_XMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMINSB (VPMINSB-256-1)
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 REG3=YMM_B3():r:qq:i8
+IFORM:       VPMINSB_YMMi8_MASKmskw_YMMi8_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x38 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 MEM0:r:qq:i8
+IFORM:       VPMINSB_YMMi8_MASKmskw_YMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMINSB (VPMINSB-512-1)
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x38 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 REG3=ZMM_B3():r:zi8
+IFORM:       VPMINSB_ZMMi8_MASKmskw_ZMMi8_ZMMi8_AVX512
+}
+
+{
+ICLASS:      VPMINSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x38 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 MEM0:r:zd:i8
+IFORM:       VPMINSB_ZMMi8_MASKmskw_ZMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPMINSD (VPMINSD-128-1)
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32
+IFORM:       VPMINSD_XMMi32_MASKmskw_XMMi32_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMINSD_XMMi32_MASKmskw_XMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMINSD (VPMINSD-256-1)
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32
+IFORM:       VPMINSD_YMMi32_MASKmskw_YMMi32_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPMINSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMINSD_YMMi32_MASKmskw_YMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMINSQ (VPMINSQ-128-1)
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i64 REG3=XMM_B3():r:dq:i64
+IFORM:       VPMINSQ_XMMi64_MASKmskw_XMMi64_XMMi64_AVX512
+}
+
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMINSQ_XMMi64_MASKmskw_XMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMINSQ (VPMINSQ-256-1)
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x39 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i64 REG3=YMM_B3():r:qq:i64
+IFORM:       VPMINSQ_YMMi64_MASKmskw_YMMi64_YMMi64_AVX512
+}
+
+{
+ICLASS:      VPMINSQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x39 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i64 MEM0:r:vv:i64:TXT=BCASTSTR
+IFORM:       VPMINSQ_YMMi64_MASKmskw_YMMi64_MEMi64_AVX512
+}
+
+
+# EMITTING VPMINSW (VPMINSW-128-1)
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPMINSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPMINSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMINSW (VPMINSW-256-1)
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPMINSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPMINSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMINSW (VPMINSW-512-1)
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPMINSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPMINSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xEA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPMINSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMINUB (VPMINUB-128-1)
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPMINUB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPMINUB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMINUB (VPMINUB-256-1)
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPMINUB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPMINUB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMINUB (VPMINUB-512-1)
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xDA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPMINUB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPMINUB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xDA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPMINUB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMINUD (VPMINUD-128-1)
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPMINUD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMINUD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMINUD (VPMINUD-256-1)
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPMINUD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPMINUD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMINUD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMINUQ (VPMINUQ-128-1)
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMINUQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMINUQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMINUQ (VPMINUQ-256-1)
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMINUQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMINUQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x3B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMINUQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMINUW (VPMINUW-128-1)
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPMINUW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPMINUW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMINUW (VPMINUW-256-1)
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPMINUW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPMINUW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMINUW (VPMINUW-512-1)
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x3A V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPMINUW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPMINUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x3A V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPMINUW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMOVB2M (VPMOVB2M-128-1)
+{
+ICLASS:      VPMOVB2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=XMM_B3():r:dq:u8
+IFORM:       VPMOVB2M_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VPMOVB2M (VPMOVB2M-256-1)
+{
+ICLASS:      VPMOVB2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=YMM_B3():r:qq:u8
+IFORM:       VPMOVB2M_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VPMOVB2M (VPMOVB2M-512-1)
+{
+ICLASS:      VPMOVB2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=ZMM_B3():r:zu8
+IFORM:       VPMOVB2M_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VPMOVD2M (VPMOVD2M-128-1)
+{
+ICLASS:      VPMOVD2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=XMM_B3():r:dq:u32
+IFORM:       VPMOVD2M_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVD2M (VPMOVD2M-256-1)
+{
+ICLASS:      VPMOVD2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=YMM_B3():r:qq:u32
+IFORM:       VPMOVD2M_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVD2M (VPMOVD2M-512-1)
+{
+ICLASS:      VPMOVD2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=ZMM_B3():r:zu32
+IFORM:       VPMOVD2M_MASKmskw_ZMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-128-1)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VPMOVDB_XMMu8_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-128-2)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPMOVDB_MEMu8_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-256-1)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VPMOVDB_XMMu8_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDB (VPMOVDB-256-2)
+{
+ICLASS:      VPMOVDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPMOVDB_MEMu8_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-128-1)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VPMOVDW_XMMu16_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-128-2)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPMOVDW_MEMu16_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-256-1)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VPMOVDW_XMMu16_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVDW (VPMOVDW-256-2)
+{
+ICLASS:      VPMOVDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPMOVDW_MEMu16_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVM2B (VPMOVM2B-128-1)
+{
+ICLASS:      VPMOVM2B
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2B_XMMu8_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2B (VPMOVM2B-256-1)
+{
+ICLASS:      VPMOVM2B
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2B_YMMu8_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2B (VPMOVM2B-512-1)
+{
+ICLASS:      VPMOVM2B
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2B_ZMMu8_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2D (VPMOVM2D-128-1)
+{
+ICLASS:      VPMOVM2D
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2D_XMMu32_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2D (VPMOVM2D-256-1)
+{
+ICLASS:      VPMOVM2D
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2D_YMMu32_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2D (VPMOVM2D-512-1)
+{
+ICLASS:      VPMOVM2D
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2D_ZMMu32_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2Q (VPMOVM2Q-128-1)
+{
+ICLASS:      VPMOVM2Q
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2Q_XMMu64_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2Q (VPMOVM2Q-256-1)
+{
+ICLASS:      VPMOVM2Q
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2Q_YMMu64_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2Q (VPMOVM2Q-512-1)
+{
+ICLASS:      VPMOVM2Q
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x38 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2Q_ZMMu64_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2W (VPMOVM2W-128-1)
+{
+ICLASS:      VPMOVM2W
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2W_XMMu16_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2W (VPMOVM2W-256-1)
+{
+ICLASS:      VPMOVM2W
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2W_YMMu16_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVM2W (VPMOVM2W-512-1)
+{
+ICLASS:      VPMOVM2W
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x28 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK_B():r:mskw
+IFORM:       VPMOVM2W_ZMMu16_MASKmskw_AVX512
+}
+
+
+# EMITTING VPMOVQ2M (VPMOVQ2M-128-1)
+{
+ICLASS:      VPMOVQ2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=XMM_B3():r:dq:u64
+IFORM:       VPMOVQ2M_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQ2M (VPMOVQ2M-256-1)
+{
+ICLASS:      VPMOVQ2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=YMM_B3():r:qq:u64
+IFORM:       VPMOVQ2M_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQ2M (VPMOVQ2M-512-1)
+{
+ICLASS:      VPMOVQ2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x39 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=ZMM_B3():r:zu64
+IFORM:       VPMOVQ2M_MASKmskw_ZMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-128-1)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVQB_XMMu8_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-128-2)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:wrd:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVQB_MEMu8_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-256-1)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVQB_XMMu8_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQB (VPMOVQB-256-2)
+{
+ICLASS:      VPMOVQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:d:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVQB_MEMu8_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-128-1)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVQD_XMMu32_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-128-2)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVQD_MEMu32_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-256-1)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVQD_XMMu32_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQD (VPMOVQD-256-2)
+{
+ICLASS:      VPMOVQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVQD_MEMu32_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-128-1)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVQW_XMMu16_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-128-2)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVQW_MEMu16_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-256-1)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVQW_XMMu16_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVQW (VPMOVQW-256-2)
+{
+ICLASS:      VPMOVQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVQW_MEMu16_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-128-1)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i32
+IFORM:       VPMOVSDB_XMMi8_MASKmskw_XMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-128-2)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:i8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i32
+IFORM:       VPMOVSDB_MEMi8_MASKmskw_XMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-256-1)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i32
+IFORM:       VPMOVSDB_XMMi8_MASKmskw_YMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDB (VPMOVSDB-256-2)
+{
+ICLASS:      VPMOVSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:i8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i32
+IFORM:       VPMOVSDB_MEMi8_MASKmskw_YMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-128-1)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i32
+IFORM:       VPMOVSDW_XMMi16_MASKmskw_XMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-128-2)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:i16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i32
+IFORM:       VPMOVSDW_MEMi16_MASKmskw_XMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-256-1)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i32
+IFORM:       VPMOVSDW_XMMi16_MASKmskw_YMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSDW (VPMOVSDW-256-2)
+{
+ICLASS:      VPMOVSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:i16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i32
+IFORM:       VPMOVSDW_MEMi16_MASKmskw_YMMi32_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-128-1)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQB_XMMi8_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-128-2)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:wrd:i8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQB_MEMi8_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-256-1)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQB_XMMi8_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQB (VPMOVSQB-256-2)
+{
+ICLASS:      VPMOVSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:d:i8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQB_MEMi8_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-128-1)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQD_XMMi32_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-128-2)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:i32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQD_MEMi32_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-256-1)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQD_XMMi32_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQD (VPMOVSQD-256-2)
+{
+ICLASS:      VPMOVSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:i32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQD_MEMi32_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-128-1)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQW_XMMi16_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-128-2)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:i16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i64
+IFORM:       VPMOVSQW_MEMi16_MASKmskw_XMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-256-1)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQW_XMMi16_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSQW (VPMOVSQW-256-2)
+{
+ICLASS:      VPMOVSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:i16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i64
+IFORM:       VPMOVSQW_MEMi16_MASKmskw_YMMi64_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-128-1)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:i16
+IFORM:       VPMOVSWB_XMMi8_MASKmskw_XMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-128-2)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:i8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:i16
+IFORM:       VPMOVSWB_MEMi8_MASKmskw_XMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-256-1)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:i16
+IFORM:       VPMOVSWB_XMMi8_MASKmskw_YMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-256-2)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:i8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:i16
+IFORM:       VPMOVSWB_MEMi8_MASKmskw_YMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-512-1)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zi16
+IFORM:       VPMOVSWB_YMMi8_MASKmskw_ZMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSWB (VPMOVSWB-512-2)
+{
+ICLASS:      VPMOVSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:i8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zi16
+IFORM:       VPMOVSWB_MEMi8_MASKmskw_ZMMi16_AVX512
+}
+
+
+# EMITTING VPMOVSXBD (VPMOVSXBD-128-1)
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBD_XMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i8
+IFORM:       VPMOVSXBD_XMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBD (VPMOVSXBD-256-1)
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x21 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBD_YMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x21 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVSXBD_YMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBQ (VPMOVSXBQ-128-1)
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:wrd:i8
+IFORM:       VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBQ (VPMOVSXBQ-256-1)
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x22 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBQ_YMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x22 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i8
+IFORM:       VPMOVSXBQ_YMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBW (VPMOVSXBW-128-1)
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBW_XMMi16_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVSXBW_XMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBW (VPMOVSXBW-256-1)
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVSXBW_YMMi16_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i8
+IFORM:       VPMOVSXBW_YMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXBW (VPMOVSXBW-512-1)
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x20 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i8
+IFORM:       VPMOVSXBW_ZMMi16_MASKmskw_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVSXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x20 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i8
+IFORM:       VPMOVSXBW_ZMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVSXDQ (VPMOVSXDQ-128-1)
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VPMOVSXDQ_XMMi64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i32
+IFORM:       VPMOVSXDQ_XMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVSXDQ (VPMOVSXDQ-256-1)
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VPMOVSXDQ_YMMi64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVSXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x25 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i32
+IFORM:       VPMOVSXDQ_YMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVSXWD (VPMOVSXWD-128-1)
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i16
+IFORM:       VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVSXWD (VPMOVSXWD-256-1)
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVSXWD_YMMi32_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x23 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i16
+IFORM:       VPMOVSXWD_YMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVSXWQ (VPMOVSXWQ-128-1)
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVSXWQ_XMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i16
+IFORM:       VPMOVSXWQ_XMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVSXWQ (VPMOVSXWQ-256-1)
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x24 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVSXWQ_YMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVSXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x24 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i16
+IFORM:       VPMOVSXWQ_YMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-128-1)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VPMOVUSDB_XMMu8_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-128-2)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPMOVUSDB_MEMu8_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-256-1)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VPMOVUSDB_XMMu8_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDB (VPMOVUSDB-256-2)
+{
+ICLASS:      VPMOVUSDB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x11 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPMOVUSDB_MEMu8_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-128-1)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u32
+IFORM:       VPMOVUSDW_XMMu16_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-128-2)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPMOVUSDW_MEMu16_MASKmskw_XMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-256-1)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u32
+IFORM:       VPMOVUSDW_XMMu16_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSDW (VPMOVUSDW-256-2)
+{
+ICLASS:      VPMOVUSDW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x13 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPMOVUSDW_MEMu16_MASKmskw_YMMu32_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-128-1)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQB_XMMu8_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-128-2)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:wrd:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQB_MEMu8_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-256-1)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQB_XMMu8_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQB (VPMOVUSQB-256-2)
+{
+ICLASS:      VPMOVUSQB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x12 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    MEM0:w:d:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQB_MEMu8_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-128-1)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQD_XMMu32_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-128-2)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u32 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQD_MEMu32_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-256-1)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQD_XMMu32_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQD (VPMOVUSQD-256-2)
+{
+ICLASS:      VPMOVUSQD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x15 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u32 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQD_MEMu32_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-128-1)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQW_XMMu16_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-128-2)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:d:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPMOVUSQW_MEMu16_MASKmskw_XMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-256-1)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQW_XMMu16_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSQW (VPMOVUSQW-256-2)
+{
+ICLASS:      VPMOVUSQW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x14 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    MEM0:w:q:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPMOVUSQW_MEMu16_MASKmskw_YMMu64_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-128-1)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u16
+IFORM:       VPMOVUSWB_XMMu8_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-128-2)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u16
+IFORM:       VPMOVUSWB_MEMu8_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-256-1)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u16
+IFORM:       VPMOVUSWB_XMMu8_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-256-2)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u16
+IFORM:       VPMOVUSWB_MEMu8_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-512-1)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu16
+IFORM:       VPMOVUSWB_YMMu8_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPMOVUSWB (VPMOVUSWB-512-2)
+{
+ICLASS:      VPMOVUSWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x10 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu16
+IFORM:       VPMOVUSWB_MEMu8_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPMOVW2M (VPMOVW2M-128-1)
+{
+ICLASS:      VPMOVW2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=XMM_B3():r:dq:u16
+IFORM:       VPMOVW2M_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPMOVW2M (VPMOVW2M-256-1)
+{
+ICLASS:      VPMOVW2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=YMM_B3():r:qq:u16
+IFORM:       VPMOVW2M_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPMOVW2M (VPMOVW2M-512-1)
+{
+ICLASS:      VPMOVW2M
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E7NM
+REAL_OPCODE: Y
+PATTERN:    EVV 0x29 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR  ZEROING=0 MASK=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=ZMM_B3():r:zu16
+IFORM:       VPMOVW2M_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-128-1)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u16
+IFORM:       VPMOVWB_XMMu8_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-128-2)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:q:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u16
+IFORM:       VPMOVWB_MEMu8_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-256-1)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u16
+IFORM:       VPMOVWB_XMMu8_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-256-2)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u16
+IFORM:       VPMOVWB_MEMu8_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-512-1)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu16
+IFORM:       VPMOVWB_YMMu8_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPMOVWB (VPMOVWB-512-2)
+{
+ICLASS:      VPMOVWB
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E6
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    MEM0:w:qq:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu16
+IFORM:       VPMOVWB_MEMu8_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPMOVZXBD (VPMOVZXBD-128-1)
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBD_XMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i8
+IFORM:       VPMOVZXBD_XMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBD (VPMOVZXBD-256-1)
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x31 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBD_YMMi32_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x31 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVZXBD_YMMi32_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBQ (VPMOVZXBQ-128-1)
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBQ_XMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:wrd:i8
+IFORM:       VPMOVZXBQ_XMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBQ (VPMOVZXBQ-256-1)
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x32 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBQ_YMMi64_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_EIGHTHMEM
+PATTERN:    EVV 0x32 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_EIGHTHMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i8
+IFORM:       VPMOVZXBQ_YMMi64_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBW (VPMOVZXBW-128-1)
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBW_XMMi16_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i8
+IFORM:       VPMOVZXBW_XMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBW (VPMOVZXBW-256-1)
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i8
+IFORM:       VPMOVZXBW_YMMi16_MASKmskw_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i8
+IFORM:       VPMOVZXBW_YMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXBW (VPMOVZXBW-512-1)
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x30 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:i8
+IFORM:       VPMOVZXBW_ZMMi16_MASKmskw_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPMOVZXBW
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x30 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR  ESIZE_8_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:i8
+IFORM:       VPMOVZXBW_ZMMi16_MASKmskw_MEMi8_AVX512
+}
+
+
+# EMITTING VPMOVZXDQ (VPMOVZXDQ-128-1)
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VPMOVZXDQ_XMMi64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i32
+IFORM:       VPMOVZXDQ_XMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVZXDQ (VPMOVZXDQ-256-1)
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x35 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i32
+IFORM:       VPMOVZXDQ_YMMi64_MASKmskw_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMOVZXDQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x35 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i32
+IFORM:       VPMOVZXDQ_YMMi64_MASKmskw_MEMi32_AVX512
+}
+
+
+# EMITTING VPMOVZXWD (VPMOVZXWD-128-1)
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVZXWD_XMMi32_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i16
+IFORM:       VPMOVZXWD_XMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVZXWD (VPMOVZXWD-256-1)
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x33 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVZXWD_YMMi32_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWD
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_HALFMEM
+PATTERN:    EVV 0x33 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_16_BITS() NELEM_HALFMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:i16
+IFORM:       VPMOVZXWD_YMMi32_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVZXWQ (VPMOVZXWQ-128-1)
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVZXWQ_XMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:d:i16
+IFORM:       VPMOVZXWQ_XMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMOVZXWQ (VPMOVZXWQ-256-1)
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x34 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:i16
+IFORM:       VPMOVZXWQ_YMMi64_MASKmskw_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMOVZXWQ
+CPL:         3
+CATEGORY:    DATAXFER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E5
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_QUARTERMEM
+PATTERN:    EVV 0x34 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR  ESIZE_16_BITS() NELEM_QUARTERMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:q:i16
+IFORM:       VPMOVZXWQ_YMMi64_MASKmskw_MEMi16_AVX512
+}
+
+
+# EMITTING VPMULDQ (VPMULDQ-128-1)
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 REG3=XMM_B3():r:dq:i32
+IFORM:       VPMULDQ_XMMi64_MASKmskw_XMMi32_XMMi32_AVX512
+}
+
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMULDQ_XMMi64_MASKmskw_XMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMULDQ (VPMULDQ-256-1)
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 REG3=YMM_B3():r:qq:i32
+IFORM:       VPMULDQ_YMMi64_MASKmskw_YMMi32_YMMi32_AVX512
+}
+
+{
+ICLASS:      VPMULDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0x28 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:i64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i32 MEM0:r:vv:i32:TXT=BCASTSTR
+IFORM:       VPMULDQ_YMMi64_MASKmskw_YMMi32_MEMi32_AVX512
+}
+
+
+# EMITTING VPMULHRSW (VPMULHRSW-128-1)
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPMULHRSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPMULHRSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMULHRSW (VPMULHRSW-256-1)
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPMULHRSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPMULHRSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMULHRSW (VPMULHRSW-512-1)
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x0B V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPMULHRSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPMULHRSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x0B V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPMULHRSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPMULHUW (VPMULHUW-128-1)
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPMULHUW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPMULHUW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULHUW (VPMULHUW-256-1)
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPMULHUW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPMULHUW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULHUW (VPMULHUW-512-1)
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPMULHUW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHUW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPMULHUW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULHW (VPMULHW-128-1)
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPMULHW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPMULHW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULHW (VPMULHW-256-1)
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPMULHW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPMULHW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULHW (VPMULHW-512-1)
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPMULHW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPMULHW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULLD (VPMULLD-128-1)
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPMULLD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULLD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMULLD (VPMULLD-256-1)
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPMULLD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULLD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMULLQ (VPMULLQ-128-1)
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMULLQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULLQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMULLQ (VPMULLQ-256-1)
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMULLQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULLQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMULLQ (VPMULLQ-512-1)
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x40 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPMULLQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x40 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULLQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMULLW (VPMULLW-128-1)
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPMULLW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPMULLW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULLW (VPMULLW-256-1)
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPMULLW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPMULLW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULLW (VPMULLW-512-1)
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD5 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPMULLW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPMULLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD5 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPMULLW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPMULUDQ (VPMULUDQ-128-1)
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPMULUDQ_XMMu64_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULUDQ_XMMu64_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPMULUDQ (VPMULUDQ-256-1)
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPMULUDQ_YMMu64_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPMULUDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION DOUBLE_WIDE_MEMOP DISP8_FULL BROADCAST_ENABLED MASKOP_EVEX
+PATTERN:    EVV 0xF4 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPMULUDQ_YMMu64_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPORD (VPORD-128-1)
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPORD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPORD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPORD (VPORD-256-1)
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPORD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPORD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPORQ (VPORQ-128-1)
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPORQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPORQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPORQ (VPORQ-256-1)
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPORQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPORQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPROLD (VPROLD-128-1)
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPROLD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPROLD (VPROLD-256-1)
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPROLD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPROLQ (VPROLQ-128-1)
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPROLQ_XMMu64_MASKmskw_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLQ_XMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPROLQ (VPROLQ-256-1)
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b001] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPROLQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPROLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b001] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPROLQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPROLVD (VPROLVD-128-1)
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPROLVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPROLVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPROLVD (VPROLVD-256-1)
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPROLVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPROLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPROLVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPROLVQ (VPROLVQ-128-1)
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPROLVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPROLVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPROLVQ (VPROLVQ-256-1)
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPROLVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPROLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPROLVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPRORD (VPRORD-128-1)
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPRORD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPRORD (VPRORD-256-1)
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPRORD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPRORQ (VPRORQ-128-1)
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPRORQ_XMMu64_MASKmskw_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORQ_XMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPRORQ (VPRORQ-256-1)
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b000] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPRORQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPRORQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b000] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPRORQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPRORVD (VPRORVD-128-1)
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPRORVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPRORVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPRORVD (VPRORVD-256-1)
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPRORVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPRORVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPRORVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPRORVQ (VPRORVQ-128-1)
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPRORVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPRORVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPRORVQ (VPRORVQ-256-1)
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPRORVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPRORVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPRORVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSADBW (VPSADBW-128-1)
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xF6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=XMM_N3():r:dq:u8 REG2=XMM_B3():r:dq:u8
+IFORM:       VPSADBW_XMMu16_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xF6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPSADBW_XMMu16_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSADBW (VPSADBW-256-1)
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xF6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=YMM_N3():r:qq:u8 REG2=YMM_B3():r:qq:u8
+IFORM:       VPSADBW_YMMu16_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xF6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPSADBW_YMMu16_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSADBW (VPSADBW-512-1)
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0xF6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=ZMM_N3():r:zu8 REG2=ZMM_B3():r:zu8
+IFORM:       VPSADBW_ZMMu16_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPSADBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xF6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPSADBW_ZMMu16_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSCATTERDD (VPSCATTERDD-128-1)
+{
+ICLASS:      VPSCATTERDD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPSCATTERDD_MEMu32_MASKmskw_XMMu32_AVX512_VL128
+}
+
+
+# EMITTING VPSCATTERDD (VPSCATTERDD-256-1)
+{
+ICLASS:      VPSCATTERDD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:u32
+IFORM:       VPSCATTERDD_MEMu32_MASKmskw_YMMu32_AVX512_VL256
+}
+
+
+# EMITTING VPSCATTERDQ (VPSCATTERDQ-128-1)
+{
+ICLASS:      VPSCATTERDQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPSCATTERDQ_MEMu64_MASKmskw_XMMu64_AVX512_VL128
+}
+
+
+# EMITTING VPSCATTERDQ (VPSCATTERDQ-256-1)
+{
+ICLASS:      VPSCATTERDQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA0 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPSCATTERDQ_MEMu64_MASKmskw_YMMu64_AVX512_VL256
+}
+
+
+# EMITTING VPSCATTERQD (VPSCATTERQD-128-1)
+{
+ICLASS:      VPSCATTERQD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPSCATTERQD_MEMu32_MASKmskw_XMMu32_AVX512_VL128
+}
+
+
+# EMITTING VPSCATTERQD (VPSCATTERQD-256-1)
+{
+ICLASS:      VPSCATTERQD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:u32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:u32
+IFORM:       VPSCATTERQD_MEMu32_MASKmskw_XMMu32_AVX512_VL256
+}
+
+
+# EMITTING VPSCATTERQQ (VPSCATTERQQ-128-1)
+{
+ICLASS:      VPSCATTERQQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:u64
+IFORM:       VPSCATTERQQ_MEMu64_MASKmskw_XMMu64_AVX512_VL128
+}
+
+
+# EMITTING VPSCATTERQQ (VPSCATTERQQ-256-1)
+{
+ICLASS:      VPSCATTERQQ
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA1 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:u64 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:u64
+IFORM:       VPSCATTERQQ_MEMu64_MASKmskw_YMMu64_AVX512_VL256
+}
+
+
+# EMITTING VPSHUFB (VPSHUFB-128-1)
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x00 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPSHUFB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x00 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPSHUFB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSHUFB (VPSHUFB-256-1)
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x00 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPSHUFB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x00 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPSHUFB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSHUFB (VPSHUFB-512-1)
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x00 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPSHUFB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x00 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPSHUFB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSHUFD (VPSHUFD-128-1)
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSHUFD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x70 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHUFD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFD (VPSHUFD-256-1)
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSHUFD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x70 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHUFD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFHW (VPSHUFHW-128-1)
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSHUFHW_XMMu16_MASKmskw_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSHUFHW_XMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFHW (VPSHUFHW-256-1)
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSHUFHW_YMMu16_MASKmskw_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSHUFHW_YMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFHW (VPSHUFHW-512-1)
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF3 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSHUFHW_ZMMu16_MASKmskw_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFHW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF3 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSHUFHW_ZMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFLW (VPSHUFLW-128-1)
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128    NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSHUFLW_XMMu16_MASKmskw_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSHUFLW_XMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFLW (VPSHUFLW-256-1)
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256    NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSHUFLW_YMMu16_MASKmskw_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSHUFLW_YMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHUFLW (VPSHUFLW-512-1)
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 VF2 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512    NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSHUFLW_ZMMu16_MASKmskw_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHUFLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 VF2 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512    NOEVSR UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSHUFLW_ZMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-128-1)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSLLD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:dq:u32
+IFORM:       VPSLLD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-128-3)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSLLD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-256-1)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSLLD_YMMu32_MASKmskw_YMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:dq:u32
+IFORM:       VPSLLD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLD (VPSLLD-256-3)
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSLLD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLDQ (VPSLLDQ-128-2)
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b111] RM[nnn]  VL128      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u8 REG1=XMM_B3():r:dq:u8 IMM0:r:b
+IFORM:       VPSLLDQ_XMMu8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b111] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_N3():w:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+IFORM:       VPSLLDQ_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLDQ (VPSLLDQ-256-2)
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b111] RM[nnn]  VL256      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u8 REG1=YMM_B3():r:qq:u8 IMM0:r:b
+IFORM:       VPSLLDQ_YMMu8_YMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b111] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_N3():w:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+IFORM:       VPSLLDQ_YMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLDQ (VPSLLDQ-512-1)
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b111] RM[nnn]  VL512      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu8 REG1=ZMM_B3():r:zu8 IMM0:r:b
+IFORM:       VPSLLDQ_ZMMu8_ZMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b111] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_N3():w:zu8 MEM0:r:zd:u8 IMM0:r:b
+IFORM:       VPSLLDQ_ZMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-128-1)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSLLQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:dq:u64
+IFORM:       VPSLLQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-128-3)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPSLLQ_XMMu64_MASKmskw_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLQ_XMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-256-1)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSLLQ_YMMu64_MASKmskw_YMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:dq:u64
+IFORM:       VPSLLQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSLLQ (VPSLLQ-256-3)
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPSLLQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSLLQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLVD (VPSLLVD-128-1)
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSLLVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSLLVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLVD (VPSLLVD-256-1)
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSLLVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSLLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSLLVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSLLVQ (VPSLLVQ-128-1)
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSLLVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSLLVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSLLVQ (VPSLLVQ-256-1)
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x47 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSLLVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSLLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x47 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSLLVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSLLVW (VPSLLVW-128-1)
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSLLVW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSLLVW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLVW (VPSLLVW-256-1)
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSLLVW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSLLVW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLVW (VPSLLVW-512-1)
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x12 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSLLVW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x12 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSLLVW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-128-1)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSLLW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSLLW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-128-3)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL128     UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSLLW_XMMu16_MASKmskw_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0 MODRM()  VL128     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSLLW_XMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-256-1)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSLLW_YMMu16_MASKmskw_YMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:dq:u16
+IFORM:       VPSLLW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-256-3)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL256     UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSLLW_YMMu16_MASKmskw_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0 MODRM()  VL256     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSLLW_YMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-512-1)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSLLW_ZMMu16_MASKmskw_ZMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xF1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:dq:u16
+IFORM:       VPSLLW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSLLW (VPSLLW-512-2)
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b110] RM[nnn]  VL512     UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSLLW_ZMMu16_MASKmskw_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSLLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b110] RM[nnn] BCRC=0 MODRM()  VL512     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSLLW_ZMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-128-1)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRAD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:dq:u32
+IFORM:       VPSRAD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-128-3)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSRAD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-256-1)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRAD_YMMu32_MASKmskw_YMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:dq:u32
+IFORM:       VPSRAD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAD (VPSRAD-256-3)
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSRAD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-128-1)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRAQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:dq:u64
+IFORM:       VPSRAQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-128-2)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPSRAQ_XMMu64_MASKmskw_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAQ_XMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-256-1)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRAQ_YMMu64_MASKmskw_YMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:dq:u64
+IFORM:       VPSRAQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAQ (VPSRAQ-256-2)
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPSRAQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRAQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAVD (VPSRAVD-128-1)
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRAVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRAVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAVD (VPSRAVD-256-1)
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSRAVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRAVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRAVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRAVQ (VPSRAVQ-128-1)
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRAVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRAVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAVQ (VPSRAVQ-256-1)
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x46 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSRAVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRAVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x46 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRAVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRAVW (VPSRAVW-128-1)
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRAVW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSRAVW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAVW (VPSRAVW-256-1)
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSRAVW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSRAVW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAVW (VPSRAVW-512-1)
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x11 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSRAVW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x11 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSRAVW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-128-1)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRAW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSRAW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-128-2)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL128     UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSRAW_XMMu16_MASKmskw_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn] BCRC=0 MODRM()  VL128     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSRAW_XMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-256-1)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRAW_YMMu16_MASKmskw_YMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:dq:u16
+IFORM:       VPSRAW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-256-2)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL256     UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSRAW_YMMu16_MASKmskw_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn] BCRC=0 MODRM()  VL256     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSRAW_YMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-512-1)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRAW_ZMMu16_MASKmskw_ZMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xE1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:dq:u16
+IFORM:       VPSRAW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRAW (VPSRAW-512-2)
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b100] RM[nnn]  VL512     UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSRAW_ZMMu16_MASKmskw_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRAW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b100] RM[nnn] BCRC=0 MODRM()  VL512     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSRAW_ZMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-128-1)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRLD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:dq:u32
+IFORM:       VPSRLD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-128-2)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSRLD_XMMu32_MASKmskw_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLD_XMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-256-1)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD2 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRLD_YMMu32_MASKmskw_YMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD2 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:dq:u32
+IFORM:       VPSRLD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLD (VPSRLD-256-2)
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSRLD_YMMu32_MASKmskw_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x72 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLD_YMMu32_MASKmskw_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLDQ (VPSRLDQ-128-1)
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b011] RM[nnn]  VL128      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u8 REG1=XMM_B3():r:dq:u8 IMM0:r:b
+IFORM:       VPSRLDQ_XMMu8_XMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b011] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_N3():w:dq:u8 MEM0:r:dq:u8 IMM0:r:b
+IFORM:       VPSRLDQ_XMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLDQ (VPSRLDQ-256-1)
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b011] RM[nnn]  VL256      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u8 REG1=YMM_B3():r:qq:u8 IMM0:r:b
+IFORM:       VPSRLDQ_YMMu8_YMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b011] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_N3():w:qq:u8 MEM0:r:qq:u8 IMM0:r:b
+IFORM:       VPSRLDQ_YMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLDQ (VPSRLDQ-512-1)
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b011] RM[nnn]  VL512      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu8 REG1=ZMM_B3():r:zu8 IMM0:r:b
+IFORM:       VPSRLDQ_ZMMu8_ZMMu8_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b011] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0 UIMM8()  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_N3():w:zu8 MEM0:r:zd:u8 IMM0:r:b
+IFORM:       VPSRLDQ_ZMMu8_MEMu8_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-128-1)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRLQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:dq:u64
+IFORM:       VPSRLQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-128-2)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPSRLQ_XMMu64_MASKmskw_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_N3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLQ_XMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-256-1)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD3 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRLQ_YMMu64_MASKmskw_YMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD3 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:dq:u64
+IFORM:       VPSRLQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLQ (VPSRLQ-256-2)
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPSRLQ_YMMu64_MASKmskw_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_N3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSRLQ_YMMu64_MASKmskw_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLVD (VPSRLVD-128-1)
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSRLVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRLVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLVD (VPSRLVD-256-1)
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSRLVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSRLVD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSRLVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSRLVQ (VPSRLVQ-128-1)
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSRLVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRLVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLVQ (VPSRLVQ-256-1)
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x45 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSRLVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSRLVQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x45 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSRLVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSRLVW (VPSRLVW-128-1)
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRLVW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSRLVW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLVW (VPSRLVW-256-1)
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSRLVW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSRLVW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLVW (VPSRLVW-512-1)
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x10 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSRLVW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLVW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x10 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSRLVW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-128-1)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRLW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSRLW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-128-2)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL128     UIMM8()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSRLW_XMMu16_MASKmskw_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0 MODRM()  VL128     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_N3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSRLW_XMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-256-1)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRLW_YMMu16_MASKmskw_YMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:dq:u16
+IFORM:       VPSRLW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-256-2)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL256     UIMM8()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSRLW_YMMu16_MASKmskw_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0 MODRM()  VL256     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_N3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSRLW_YMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-512-1)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD1 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSRLW_ZMMu16_MASKmskw_ZMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_MEM128
+PATTERN:    EVV 0xD1 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_MEM128()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:dq:u16
+IFORM:       VPSRLW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSRLW (VPSRLW-512-2)
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[0b010] RM[nnn]  VL512     UIMM8()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSRLW_ZMMu16_MASKmskw_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSRLW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x71 V66 V0F MOD[mm] MOD!=3 REG[0b010] RM[nnn] BCRC=0 MODRM()  VL512     UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_N3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSRLW_ZMMu16_MASKmskw_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSUBB (VPSUBB-128-1)
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPSUBB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPSUBB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBB (VPSUBB-256-1)
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPSUBB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPSUBB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBB (VPSUBB-512-1)
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPSUBB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPSUBB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBD (VPSUBD-128-1)
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSUBD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSUBD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSUBD (VPSUBD-256-1)
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFA V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSUBD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSUBD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFA V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSUBD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSUBQ (VPSUBQ-128-1)
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSUBQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSUBQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSUBQ (VPSUBQ-256-1)
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xFB V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSUBQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSUBQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xFB V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSUBQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSUBSB (VPSUBSB-128-1)
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 REG3=XMM_B3():r:dq:i8
+IFORM:       VPSUBSB_XMMi8_MASKmskw_XMMi8_XMMi8_AVX512
+}
+
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i8 MEM0:r:dq:i8
+IFORM:       VPSUBSB_XMMi8_MASKmskw_XMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPSUBSB (VPSUBSB-256-1)
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 REG3=YMM_B3():r:qq:i8
+IFORM:       VPSUBSB_YMMi8_MASKmskw_YMMi8_YMMi8_AVX512
+}
+
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i8 MEM0:r:qq:i8
+IFORM:       VPSUBSB_YMMi8_MASKmskw_YMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPSUBSB (VPSUBSB-512-1)
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 REG3=ZMM_B3():r:zi8
+IFORM:       VPSUBSB_ZMMi8_MASKmskw_ZMMi8_ZMMi8_AVX512
+}
+
+{
+ICLASS:      VPSUBSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi8 MEM0:r:zd:i8
+IFORM:       VPSUBSB_ZMMi8_MASKmskw_ZMMi8_MEMi8_AVX512
+}
+
+
+# EMITTING VPSUBSW (VPSUBSW-128-1)
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:i16
+IFORM:       VPSUBSW_XMMi16_MASKmskw_XMMi16_XMMi16_AVX512
+}
+
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:dq:i16
+IFORM:       VPSUBSW_XMMi16_MASKmskw_XMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPSUBSW (VPSUBSW-256-1)
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:i16
+IFORM:       VPSUBSW_YMMi16_MASKmskw_YMMi16_YMMi16_AVX512
+}
+
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:i16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:qq:i16
+IFORM:       VPSUBSW_YMMi16_MASKmskw_YMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPSUBSW (VPSUBSW-512-1)
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xE9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zi16
+IFORM:       VPSUBSW_ZMMi16_MASKmskw_ZMMi16_ZMMi16_AVX512
+}
+
+{
+ICLASS:      VPSUBSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xE9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zi16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:zd:i16
+IFORM:       VPSUBSW_ZMMi16_MASKmskw_ZMMi16_MEMi16_AVX512
+}
+
+
+# EMITTING VPSUBUSB (VPSUBUSB-128-1)
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPSUBUSB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPSUBUSB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBUSB (VPSUBUSB-256-1)
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPSUBUSB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPSUBUSB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBUSB (VPSUBUSB-512-1)
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD8 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPSUBUSB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPSUBUSB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD8 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPSUBUSB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPSUBUSW (VPSUBUSW-128-1)
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSUBUSW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSUBUSW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSUBUSW (VPSUBUSW-256-1)
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSUBUSW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSUBUSW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSUBUSW (VPSUBUSW-512-1)
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xD9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSUBUSW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBUSW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xD9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSUBUSW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSUBW (VPSUBW-128-1)
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSUBW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSUBW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSUBW (VPSUBW-256-1)
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSUBW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSUBW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSUBW (VPSUBW-512-1)
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xF9 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSUBW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSUBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xF9 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSUBW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTERNLOGD (VPTERNLOGD-128-1)
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPTERNLOGD_XMMu32_MASKmskw_XMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGD_XMMu32_MASKmskw_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPTERNLOGD (VPTERNLOGD-256-1)
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPTERNLOGD_YMMu32_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGD_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPTERNLOGQ (VPTERNLOGQ-128-1)
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPTERNLOGQ_XMMu64_MASKmskw_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGQ_XMMu64_MASKmskw_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPTERNLOGQ (VPTERNLOGQ-256-1)
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x25 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPTERNLOGQ_YMMu64_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPTERNLOGQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x25 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPTERNLOGQ_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPTESTMB (VPTESTMB-128-1)
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTMB (VPTESTMB-256-1)
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTMB (VPTESTMB-512-1)
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPTESTMB_MASKmskw_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTMD (VPTESTMD-128-1)
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTMD (VPTESTMD-256-1)
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTMD_MASKmskw_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTMQ (VPTESTMQ-128-1)
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTESTMQ (VPTESTMQ-256-1)
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTMQ_MASKmskw_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTESTMW (VPTESTMW-128-1)
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTESTMW (VPTESTMW-256-1)
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTESTMW (VPTESTMW-512-1)
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPTESTMW_MASKmskw_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTESTNMB (VPTESTNMB-128-1)
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTNMB (VPTESTNMB-256-1)
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTNMB (VPTESTNMB-512-1)
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPTESTNMB
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPTESTNMB_MASKmskw_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPTESTNMD (VPTESTNMD-128-1)
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTNMD (VPTESTNMD-256-1)
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPTESTNMD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ZEROING=0  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPTESTNMD_MASKmskw_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPTESTNMQ (VPTESTNMQ-128-1)
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTESTNMQ (VPTESTNMQ-256-1)
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPTESTNMQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x27 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ZEROING=0  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPTESTNMQ_MASKmskw_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPTESTNMW (VPTESTNMW-128-1)
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTESTNMW (VPTESTNMW-256-1)
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPTESTNMW (VPTESTNMW-512-1)
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPTESTNMW
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x26 VF3 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ZEROING=0  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPTESTNMW_MASKmskw_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKHBW (VPUNPCKHBW-128-1)
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x68 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPUNPCKHBW_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x68 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPUNPCKHBW_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKHBW (VPUNPCKHBW-256-1)
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x68 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPUNPCKHBW_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x68 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPUNPCKHBW_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKHBW (VPUNPCKHBW-512-1)
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x68 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPUNPCKHBW_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x68 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPUNPCKHBW_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKHDQ (VPUNPCKHDQ-128-1)
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPUNPCKHDQ_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKHDQ_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKHDQ (VPUNPCKHDQ-256-1)
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6A V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPUNPCKHDQ_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6A V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKHDQ_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKHQDQ (VPUNPCKHQDQ-128-1)
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPUNPCKHQDQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKHQDQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKHQDQ (VPUNPCKHQDQ-256-1)
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6D V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPUNPCKHQDQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6D V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKHQDQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKHWD (VPUNPCKHWD-128-1)
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x69 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPUNPCKHWD_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x69 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPUNPCKHWD_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKHWD (VPUNPCKHWD-256-1)
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x69 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPUNPCKHWD_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x69 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPUNPCKHWD_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKHWD (VPUNPCKHWD-512-1)
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x69 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPUNPCKHWD_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKHWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x69 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPUNPCKHWD_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKLBW (VPUNPCKLBW-128-1)
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x60 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPUNPCKLBW_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x60 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPUNPCKLBW_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKLBW (VPUNPCKLBW-256-1)
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x60 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPUNPCKLBW_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x60 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPUNPCKLBW_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKLBW (VPUNPCKLBW-512-1)
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x60 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPUNPCKLBW_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLBW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x60 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPUNPCKLBW_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPUNPCKLDQ (VPUNPCKLDQ-128-1)
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPUNPCKLDQ_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x62 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKLDQ_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKLDQ (VPUNPCKLDQ-256-1)
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPUNPCKLDQ_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x62 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPUNPCKLDQ_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPUNPCKLQDQ (VPUNPCKLQDQ-128-1)
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPUNPCKLQDQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKLQDQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKLQDQ (VPUNPCKLQDQ-256-1)
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x6C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPUNPCKLQDQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLQDQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x6C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPUNPCKLQDQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPUNPCKLWD (VPUNPCKLWD-128-1)
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x61 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPUNPCKLWD_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x61 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPUNPCKLWD_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKLWD (VPUNPCKLWD-256-1)
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x61 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPUNPCKLWD_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x61 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPUNPCKLWD_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPUNPCKLWD (VPUNPCKLWD-512-1)
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x61 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPUNPCKLWD_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPUNPCKLWD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512BW_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x61 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPUNPCKLWD_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPXORD (VPXORD-128-1)
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPXORD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPXORD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPXORD (VPXORD-256-1)
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPXORD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPXORD
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPXORD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPXORQ (VPXORQ-128-1)
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPXORQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPXORQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPXORQ (VPXORQ-256-1)
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xEF V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPXORQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPXORQ
+CPL:         3
+CATEGORY:    LOGICAL
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xEF V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPXORQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VRANGEPD (VRANGEPD-128-1)
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRANGEPD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRANGEPD (VRANGEPD-256-1)
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VRANGEPD_YMMf64_MASKmskw_YMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPD_YMMf64_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRANGEPD (VRANGEPD-512-1)
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VRANGEPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VRANGEPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRANGEPS (VRANGEPS-128-1)
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRANGEPS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRANGEPS (VRANGEPS-256-1)
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VRANGEPS_YMMf32_MASKmskw_YMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPS_YMMf32_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRANGEPS (VRANGEPS-512-1)
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VRANGEPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x50 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VRANGEPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRANGEPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRANGESD (VRANGESD-128-1)
+{
+ICLASS:      VRANGESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRANGESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRANGESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1   UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VRANGESD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRANGESS (VRANGESS-128-1)
+{
+ICLASS:      VRANGESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRANGESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRANGESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRANGESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x51 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0   UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VRANGESS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRCP14PD (VRCP14PD-128-1)
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VRCP14PD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRCP14PD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRCP14PD (VRCP14PD-256-1)
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VRCP14PD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VRCP14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRCP14PD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRCP14PS (VRCP14PS-128-1)
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VRCP14PS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRCP14PS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VRCP14PS (VRCP14PS-256-1)
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VRCP14PS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VRCP14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRCP14PS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VREDUCEPD (VREDUCEPD-128-1)
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VREDUCEPD_XMMf64_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPD_XMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCEPD (VREDUCEPD-256-1)
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VREDUCEPD_YMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPD_YMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCEPD (VREDUCEPD-512-1)
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VREDUCEPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf64 IMM0:r:b
+IFORM:       VREDUCEPD_ZMMf64_MASKmskw_ZMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPD_ZMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCEPS (VREDUCEPS-128-1)
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VREDUCEPS_XMMf32_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPS_XMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCEPS (VREDUCEPS-256-1)
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VREDUCEPS_YMMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPS_YMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCEPS (VREDUCEPS-512-1)
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VREDUCEPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x56 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN512() SAE()  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zf32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zf32 IMM0:r:b
+IFORM:       VREDUCEPS_ZMMf32_MASKmskw_ZMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x56 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VREDUCEPS_ZMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCESD (VREDUCESD-128-1)
+{
+ICLASS:      VREDUCESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VREDUCESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VREDUCESD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCESD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W1   UIMM8()  ESIZE_64_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:q:f64 IMM0:r:b
+IFORM:       VREDUCESD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VREDUCESS (VREDUCESS-128-1)
+{
+ICLASS:      VREDUCESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VREDUCESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR SIMD_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[0b11] MOD=3 BCRC=1 REG[rrr] RM[nnn] FIX_ROUND_LEN128() SAE()  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32:TXT=SAESTR REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VREDUCESS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VREDUCESS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_SCALAR
+EXCEPTIONS:     AVX512-E3
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX MXCSR SIMD_SCALAR DISP8_SCALAR
+PATTERN:    EVV 0x57 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  W0   UIMM8()  ESIZE_32_BITS() NELEM_SCALAR()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:d:f32 IMM0:r:b
+IFORM:       VREDUCESS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALEPD (VRNDSCALEPD-128-1)
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x09 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VRNDSCALEPD_XMMf64_MASKmskw_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x09 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPD_XMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALEPD (VRNDSCALEPD-256-1)
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x09 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VRNDSCALEPD_YMMf64_MASKmskw_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x09 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPD_YMMf64_MASKmskw_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALEPS (VRNDSCALEPS-128-1)
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x08 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VRNDSCALEPS_XMMf32_MASKmskw_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x08 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPS_XMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRNDSCALEPS (VRNDSCALEPS-256-1)
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x08 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VRNDSCALEPS_YMMf32_MASKmskw_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VRNDSCALEPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x08 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VRNDSCALEPS_YMMf32_MASKmskw_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VRSQRT14PD (VRSQRT14PD-128-1)
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VRSQRT14PD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRSQRT14PD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRSQRT14PD (VRSQRT14PD-256-1)
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VRSQRT14PD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VRSQRT14PD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VRSQRT14PS (VRSQRT14PS-128-1)
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VRSQRT14PS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRSQRT14PS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VRSQRT14PS (VRSQRT14PS-256-1)
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x4E V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VRSQRT14PS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VRSQRT14PS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x4E V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VRSQRT14PS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VSCALEFPD (VSCALEFPD-128-1)
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSCALEFPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSCALEFPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSCALEFPD (VSCALEFPD-256-1)
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VSCALEFPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VSCALEFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSCALEFPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSCALEFPS (VSCALEFPS-128-1)
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSCALEFPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSCALEFPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSCALEFPS (VSCALEFPS-256-1)
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x2C V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VSCALEFPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VSCALEFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x2C V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSCALEFPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSCATTERDPD (VSCATTERDPD-128-1)
+{
+ICLASS:      VSCATTERDPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VSCATTERDPD_MEMf64_MASKmskw_XMMf64_AVX512_VL128
+}
+
+
+# EMITTING VSCATTERDPD (VSCATTERDPD-256-1)
+{
+ICLASS:      VSCATTERDPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:f64
+IFORM:       VSCATTERDPD_MEMf64_MASKmskw_YMMf64_AVX512_VL256
+}
+
+
+# EMITTING VSCATTERDPS (VSCATTERDPS-128-1)
+{
+ICLASS:      VSCATTERDPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VSCATTERDPS_MEMf32_MASKmskw_XMMf32_AVX512_VL128
+}
+
+
+# EMITTING VSCATTERDPS (VSCATTERDPS-256-1)
+{
+ICLASS:      VSCATTERDPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  DWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA2 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:f32
+IFORM:       VSCATTERDPS_MEMf32_MASKmskw_YMMf32_AVX512_VL256
+}
+
+
+# EMITTING VSCATTERQPD (VSCATTERQPD-128-1)
+{
+ICLASS:      VSCATTERQPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W1 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:f64
+IFORM:       VSCATTERQPD_MEMf64_MASKmskw_XMMf64_AVX512_VL128
+}
+
+
+# EMITTING VSCATTERQPD (VSCATTERQPD-256-1)
+{
+ICLASS:      VSCATTERQPD
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W1 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_64_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:q:f64 REG0=MASKNOT0():rw:mskw REG1=YMM_R3():r:qq:f64
+IFORM:       VSCATTERQPD_MEMf64_MASKmskw_YMMf64_AVX512_VL256
+}
+
+
+# EMITTING VSCATTERQPS (VSCATTERQPS-128-1)
+{
+ICLASS:      VSCATTERQPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL128  W0 UISA_VMODRM_XMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VSCATTERQPS_MEMf32_MASKmskw_XMMf32_AVX512_VL128
+}
+
+
+# EMITTING VSCATTERQPS (VSCATTERQPS-256-1)
+{
+ICLASS:      VSCATTERQPS
+CPL:         3
+CATEGORY:    SCATTER
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E12
+REAL_OPCODE: Y
+ATTRIBUTES:  QWORD_INDICES DISP8_GSCAT MEMORY_FAULT_SUPPRESSION MASKOP_EVEX SPECIAL_AGEN_REQUIRED SCATTER
+PATTERN:    EVV 0xA3 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[0b100] RM=4 BCRC=0   VL256  W0 UISA_VMODRM_YMM() eanot16  NOVSR  ZEROING=0  ESIZE_32_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:d:f32 REG0=MASKNOT0():rw:mskw REG1=XMM_R3():r:dq:f32
+IFORM:       VSCATTERQPS_MEMf32_MASKmskw_XMMf32_AVX512_VL256
+}
+
+
+# EMITTING VSHUFF32X4 (VSHUFF32X4-256-1)
+{
+ICLASS:      VSHUFF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VSHUFF32X4_YMMf32_MASKmskw_YMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFF32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x23 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFF32X4_YMMf32_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFF64X2 (VSHUFF64X2-256-1)
+{
+ICLASS:      VSHUFF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x23 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VSHUFF64X2_YMMf64_MASKmskw_YMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFF64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x23 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFF64X2_YMMf64_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFI32X4 (VSHUFI32X4-256-1)
+{
+ICLASS:      VSHUFI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x43 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VSHUFI32X4_YMMu32_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFI32X4
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x43 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFI32X4_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFI64X2 (VSHUFI64X2-256-1)
+{
+ICLASS:      VSHUFI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x43 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VSHUFI64X2_YMMu64_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFI64X2
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x43 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFI64X2_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPD (VSHUFPD-128-1)
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64 IMM0:r:b
+IFORM:       VSHUFPD_XMMf64_MASKmskw_XMMf64_XMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPD_XMMf64_MASKmskw_XMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPD (VSHUFPD-256-1)
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64 IMM0:r:b
+IFORM:       VSHUFPD_YMMf64_MASKmskw_YMMf64_YMMf64_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPD_YMMf64_MASKmskw_YMMf64_MEMf64_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPS (VSHUFPS-128-1)
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32 IMM0:r:b
+IFORM:       VSHUFPS_XMMf32_MASKmskw_XMMf32_XMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPS_XMMf32_MASKmskw_XMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VSHUFPS (VSHUFPS-256-1)
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xC6 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32 IMM0:r:b
+IFORM:       VSHUFPS_YMMf32_MASKmskw_YMMf32_YMMf32_IMM8_AVX512
+}
+
+{
+ICLASS:      VSHUFPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xC6 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VSHUFPS_YMMf32_MASKmskw_YMMf32_MEMf32_IMM8_AVX512
+}
+
+
+# EMITTING VSQRTPD (VSQRTPD-128-1)
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f64
+IFORM:       VSQRTPD_XMMf64_MASKmskw_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSQRTPD_XMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VSQRTPD (VSQRTPD-256-1)
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f64
+IFORM:       VSQRTPD_YMMf64_MASKmskw_YMMf64_AVX512
+}
+
+{
+ICLASS:      VSQRTPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSQRTPD_YMMf64_MASKmskw_MEMf64_AVX512
+}
+
+
+# EMITTING VSQRTPS (VSQRTPS-128-1)
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:f32
+IFORM:       VSQRTPS_XMMf32_MASKmskw_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSQRTPS_XMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VSQRTPS (VSQRTPS-256-1)
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x51 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:f32
+IFORM:       VSQRTPS_YMMf32_MASKmskw_YMMf32_AVX512
+}
+
+{
+ICLASS:      VSQRTPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x51 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSQRTPS_YMMf32_MASKmskw_MEMf32_AVX512
+}
+
+
+# EMITTING VSUBPD (VSUBPD-128-1)
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VSUBPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSUBPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSUBPD (VSUBPD-256-1)
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VSUBPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VSUBPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VSUBPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VSUBPS (VSUBPS-128-1)
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VSUBPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSUBPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VSUBPS (VSUBPS-256-1)
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX MXCSR
+PATTERN:    EVV 0x5C VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VSUBPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VSUBPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E2
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL MXCSR BROADCAST_ENABLED
+PATTERN:    EVV 0x5C VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VSUBPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKHPD (VUNPCKHPD-128-1)
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VUNPCKHPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKHPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKHPD (VUNPCKHPD-256-1)
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VUNPCKHPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKHPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKHPS (VUNPCKHPS-128-1)
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VUNPCKHPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKHPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKHPS (VUNPCKHPS-256-1)
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x15 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VUNPCKHPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKHPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x15 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKHPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKLPD (VUNPCKLPD-128-1)
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VUNPCKLPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKLPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKLPD (VUNPCKLPD-256-1)
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VUNPCKLPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VUNPCKLPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VUNPCKLPS (VUNPCKLPS-128-1)
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VUNPCKLPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKLPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VUNPCKLPS (VUNPCKLPS-256-1)
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x14 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VUNPCKLPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VUNPCKLPS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512F_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x14 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VUNPCKLPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VXORPD (VXORPD-128-1)
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 REG3=XMM_B3():r:dq:f64
+IFORM:       VXORPD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512
+}
+
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VXORPD_XMMf64_MASKmskw_XMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VXORPD (VXORPD-256-1)
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 REG3=YMM_B3():r:qq:f64
+IFORM:       VXORPD_YMMf64_MASKmskw_YMMf64_YMMf64_AVX512
+}
+
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VXORPD_YMMf64_MASKmskw_YMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VXORPD (VXORPD-512-1)
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 V66 V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 REG3=ZMM_B3():r:zf64
+IFORM:       VXORPD_ZMMf64_MASKmskw_ZMMf64_ZMMf64_AVX512
+}
+
+{
+ICLASS:      VXORPD
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf64 MEM0:r:vv:f64:TXT=BCASTSTR
+IFORM:       VXORPD_ZMMf64_MASKmskw_ZMMf64_MEMf64_AVX512
+}
+
+
+# EMITTING VXORPS (VXORPS-128-1)
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 REG3=XMM_B3():r:dq:f32
+IFORM:       VXORPS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512
+}
+
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VXORPS_XMMf32_MASKmskw_XMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VXORPS (VXORPS-256-1)
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 REG3=YMM_B3():r:qq:f32
+IFORM:       VXORPS_YMMf32_MASKmskw_YMMf32_YMMf32_AVX512
+}
+
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:f32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:f32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VXORPS_YMMf32_MASKmskw_YMMf32_MEMf32_AVX512
+}
+
+
+# EMITTING VXORPS (VXORPS-512-1)
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x57 VNP V0F MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 REG3=ZMM_B3():r:zf32
+IFORM:       VXORPS_ZMMf32_MASKmskw_ZMMf32_ZMMf32_AVX512
+}
+
+{
+ICLASS:      VXORPS
+CPL:         3
+CATEGORY:    LOGICAL_FP
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512DQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x57 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zf32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zf32 MEM0:r:vv:f32:TXT=BCASTSTR
+IFORM:       VXORPS_ZMMf32_MASKmskw_ZMMf32_MEMf32_AVX512
+}
+
+
+AVX_INSTRUCTIONS()::
+# EMITTING KADDB (KADDB-256-1)
+{
+ICLASS:      KADDB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4A V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KADDB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KADDD (KADDD-256-1)
+{
+ICLASS:      KADDD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4A V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KADDD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KADDQ (KADDQ-256-1)
+{
+ICLASS:      KADDQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4A VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KADDQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KADDW (KADDW-256-1)
+{
+ICLASS:      KADDW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4A VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KADDW_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDB (KANDB-256-1)
+{
+ICLASS:      KANDB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x41 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDD (KANDD-256-1)
+{
+ICLASS:      KANDD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x41 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDNB (KANDNB-256-1)
+{
+ICLASS:      KANDNB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x42 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDNB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDND (KANDND-256-1)
+{
+ICLASS:      KANDND
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x42 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDND_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDNQ (KANDNQ-256-1)
+{
+ICLASS:      KANDNQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x42 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDNQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KANDQ (KANDQ-256-1)
+{
+ICLASS:      KANDQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x41 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KANDQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVB (KMOVB-128-1)
+{
+ICLASS:      KMOVB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw:u8
+IFORM:       KMOVB_MASKmskw_MASKu8_AVX512
+}
+
+{
+ICLASS:      KMOVB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw MEM0:r:b:u8
+IFORM:       KMOVB_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING KMOVB (KMOVB-128-2)
+{
+ICLASS:      KMOVB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x91 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W0  NOVSR
+OPERANDS:    MEM0:w:b:u8 REG0=MASK_R():r:mskw
+IFORM:       KMOVB_MEMu8_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVB (KMOVB-128-3)
+{
+ICLASS:      KMOVB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x92 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=GPR32_B():r:d:u32
+IFORM:       KMOVB_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING KMOVB (KMOVB-128-4)
+{
+ICLASS:      KMOVB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x93 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=MASK_B():r:mskw
+IFORM:       KMOVB_GPR32u32_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVD (KMOVD-128-1)
+{
+ICLASS:      KMOVD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw:u32
+IFORM:       KMOVD_MASKmskw_MASKu32_AVX512
+}
+
+{
+ICLASS:      KMOVD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw MEM0:r:d:u32
+IFORM:       KMOVD_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING KMOVD (KMOVD-128-2)
+{
+ICLASS:      KMOVD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x91 V66 V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W1  NOVSR
+OPERANDS:    MEM0:w:d:u32 REG0=MASK_R():r:mskw
+IFORM:       KMOVD_MEMu32_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVD (KMOVD-128-3)
+{
+ICLASS:      KMOVD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+COMMENT:     KMOVQ aliases to KMOVD in 32b mode due to W bit being ignored.
+PATTERN:    VV1 0x92 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0 mode64  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=GPR32_B():r:d:u32
+IFORM:       KMOVD_MASKmskw_GPR32u32_AVX512
+
+PATTERN:    VV1 0x92 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  not64 NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=GPR32_B():r:d:u32
+IFORM:       KMOVD_MASKmskw_GPR32u32_AVX512
+}
+
+
+# EMITTING KMOVD (KMOVD-128-4)
+{
+ICLASS:      KMOVD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+COMMENT:     KMOVQ aliases to KMOVD in 32b mode due to W bit being ignored.
+PATTERN:    VV1 0x93 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  mode64 NOVSR
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=MASK_B():r:mskw
+IFORM:       KMOVD_GPR32u32_MASKmskw_AVX512
+
+PATTERN:    VV1 0x93 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  not64  NOVSR
+OPERANDS:    REG0=GPR32_R():w:d:u32 REG1=MASK_B():r:mskw
+IFORM:       KMOVD_GPR32u32_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVQ (KMOVQ-128-1)
+{
+ICLASS:      KMOVQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw:u64
+IFORM:       KMOVQ_MASKmskw_MASKu64_AVX512
+}
+
+{
+ICLASS:      KMOVQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x90 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw MEM0:r:q:u64
+IFORM:       KMOVQ_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING KMOVQ (KMOVQ-128-2)
+{
+ICLASS:      KMOVQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K21
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x91 VNP V0F MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL=0  W1  NOVSR
+OPERANDS:    MEM0:w:q:u64 REG0=MASK_R():r:mskw
+IFORM:       KMOVQ_MEMu64_MASKmskw_AVX512
+}
+
+
+# EMITTING KMOVQ (KMOVQ-128-3)
+{
+ICLASS:      KMOVQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x92 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  mode64  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=GPR64_B():r:q:u64
+IFORM:       KMOVQ_MASKmskw_GPR64u64_AVX512
+}
+
+
+# EMITTING KMOVQ (KMOVQ-128-4)
+{
+ICLASS:      KMOVQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x93 VF2 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  mode64  NOVSR
+OPERANDS:    REG0=GPR64_R():w:q:u64 REG1=MASK_B():r:mskw
+IFORM:       KMOVQ_GPR64u64_MASKmskw_AVX512
+}
+
+
+# EMITTING KNOTB (KNOTB-128-1)
+{
+ICLASS:      KNOTB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x44 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw
+IFORM:       KNOTB_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KNOTD (KNOTD-128-1)
+{
+ICLASS:      KNOTD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x44 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw
+IFORM:       KNOTD_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KNOTQ (KNOTQ-128-1)
+{
+ICLASS:      KNOTQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x44 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw
+IFORM:       KNOTQ_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORB (KORB-256-1)
+{
+ICLASS:      KORB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x45 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KORB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORD (KORD-256-1)
+{
+ICLASS:      KORD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x45 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KORD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORQ (KORQ-256-1)
+{
+ICLASS:      KORQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x45 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KORQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORTESTB (KORTESTB-128-1)
+{
+ICLASS:      KORTESTB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x98 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KORTESTB_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORTESTD (KORTESTD-128-1)
+{
+ICLASS:      KORTESTD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x98 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KORTESTD_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KORTESTQ (KORTESTQ-128-1)
+{
+ICLASS:      KORTESTQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x98 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KORTESTQ_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KSHIFTLB (KSHIFTLB-128-1)
+{
+ICLASS:      KSHIFTLB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x32 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTLB_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTLD (KSHIFTLD-128-1)
+{
+ICLASS:      KSHIFTLD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x33 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTLD_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTLQ (KSHIFTLQ-128-1)
+{
+ICLASS:      KSHIFTLQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x33 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTLQ_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTRB (KSHIFTRB-128-1)
+{
+ICLASS:      KSHIFTRB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x30 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTRB_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTRD (KSHIFTRD-128-1)
+{
+ICLASS:      KSHIFTRD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x31 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTRD_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KSHIFTRQ (KSHIFTRQ-128-1)
+{
+ICLASS:      KSHIFTRQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x31 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR UIMM8()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_B():r:mskw IMM0:r:b
+IFORM:       KSHIFTRQ_MASKmskw_MASKmskw_IMM8_AVX512
+}
+
+
+# EMITTING KTESTB (KTESTB-128-1)
+{
+ICLASS:      KTESTB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x99 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KTESTB_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KTESTD (KTESTD-128-1)
+{
+ICLASS:      KTESTD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x99 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KTESTD_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KTESTQ (KTESTQ-128-1)
+{
+ICLASS:      KTESTQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x99 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W1  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KTESTQ_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KTESTW (KTESTW-128-1)
+{
+ICLASS:      KTESTW
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+FLAGS:       MUST [ cf-mod zf-mod  pf-0 of-0 af-0 sf-0 ]
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x99 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=0  W0  NOVSR
+OPERANDS:    REG0=MASK_R():r:mskw REG1=MASK_B():r:mskw
+IFORM:       KTESTW_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KUNPCKDQ (KUNPCKDQ-256-1)
+{
+ICLASS:      KUNPCKDQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4B VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KUNPCKDQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KUNPCKWD (KUNPCKWD-256-1)
+{
+ICLASS:      KUNPCKWD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x4B VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KUNPCKWD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXNORB (KXNORB-256-1)
+{
+ICLASS:      KXNORB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x46 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXNORB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXNORD (KXNORD-256-1)
+{
+ICLASS:      KXNORD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x46 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXNORD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXNORQ (KXNORQ-256-1)
+{
+ICLASS:      KXNORQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x46 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXNORQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXORB (KXORB-256-1)
+{
+ICLASS:      KXORB
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512DQ_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x47 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXORB_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXORD (KXORD-256-1)
+{
+ICLASS:      KXORD
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x47 V66 V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXORD_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+# EMITTING KXORQ (KXORQ-256-1)
+{
+ICLASS:      KXORQ
+CPL:         3
+CATEGORY:    KMASK
+EXTENSION:   AVX512VEX
+ISA_SET:     AVX512BW_KOP
+EXCEPTIONS:     AVX512-K20
+REAL_OPCODE: Y
+ATTRIBUTES:  KMASK
+PATTERN:    VV1 0x47 VNP V0F MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL=1  W1
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK_N():r:mskw REG2=MASK_B():r:mskw
+IFORM:       KXORQ_MASKmskw_MASKmskw_MASKmskw_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/avx512ifma/ifma-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPMADD52HUQ (VPMADD52HUQ-128-1)
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMADD52HUQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52HUQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADD52HUQ (VPMADD52HUQ-256-1)
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMADD52HUQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52HUQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADD52HUQ (VPMADD52HUQ-512-1)
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPMADD52HUQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52HUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB5 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52HUQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADD52LUQ (VPMADD52LUQ-128-1)
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMADD52LUQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52LUQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADD52LUQ (VPMADD52LUQ-256-1)
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMADD52LUQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52LUQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPMADD52LUQ (VPMADD52LUQ-512-1)
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPMADD52LUQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMADD52LUQ
+CPL:         3
+CATEGORY:    IFMA
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_IFMA_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xB4 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMADD52LUQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/avx512vbmi/vbmi-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPERMB (VPERMB-128-1)
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPERMB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPERMB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMB (VPERMB-256-1)
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPERMB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPERMB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMB (VPERMB-512-1)
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x8D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPERMB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x8D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPERMB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMI2B (VPERMI2B-128-1)
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPERMI2B_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPERMI2B_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMI2B (VPERMI2B-256-1)
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPERMI2B_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPERMI2B_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMI2B (VPERMI2B-512-1)
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x75 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPERMI2B_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMI2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x75 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPERMI2B_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMT2B (VPERMT2B-128-1)
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VPERMT2B_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VPERMT2B_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMT2B (VPERMT2B-256-1)
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VPERMT2B_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VPERMT2B_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPERMT2B (VPERMT2B-512-1)
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x7D V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VPERMT2B_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPERMT2B
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x7D V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VPERMT2B_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VPMULTISHIFTQB (VPMULTISHIFTQB-128-1)
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x83 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u64
+IFORM:       VPMULTISHIFTQB_XMMu8_MASKmskw_XMMu8_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_128
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x83 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULTISHIFTQB_XMMu8_MASKmskw_XMMu8_MEMu64_AVX512
+}
+
+
+# EMITTING VPMULTISHIFTQB (VPMULTISHIFTQB-256-1)
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x83 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u64
+IFORM:       VPMULTISHIFTQB_YMMu8_MASKmskw_YMMu8_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_256
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x83 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULTISHIFTQB_YMMu8_MASKmskw_YMMu8_MEMu64_AVX512
+}
+
+
+# EMITTING VPMULTISHIFTQB (VPMULTISHIFTQB-512-1)
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x83 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu64
+IFORM:       VPMULTISHIFTQB_ZMMu8_MASKmskw_ZMMu8_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPMULTISHIFTQB
+CPL:         3
+CATEGORY:    AVX512_VBMI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI_512
+EXCEPTIONS:     AVX512-E4NF
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x83 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPMULTISHIFTQB_ZMMu8_MASKmskw_ZMMu8_MEMu64_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/bitalg/bitalg-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPOPCNTB (VPOPCNTB-128-1)
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8
+IFORM:       VPOPCNTB_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u8
+IFORM:       VPOPCNTB_XMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPOPCNTB (VPOPCNTB-256-1)
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u8
+IFORM:       VPOPCNTB_YMMu8_MASKmskw_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u8
+IFORM:       VPOPCNTB_YMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPOPCNTB (VPOPCNTB-512-1)
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu8
+IFORM:       VPOPCNTB_ZMMu8_MASKmskw_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPOPCNTB
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u8
+IFORM:       VPOPCNTB_ZMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPOPCNTW (VPOPCNTW-128-1)
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16
+IFORM:       VPOPCNTW_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16
+IFORM:       VPOPCNTW_XMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPOPCNTW (VPOPCNTW-256-1)
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16
+IFORM:       VPOPCNTW_YMMu16_MASKmskw_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16
+IFORM:       VPOPCNTW_YMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPOPCNTW (VPOPCNTW-512-1)
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x54 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16
+IFORM:       VPOPCNTW_ZMMu16_MASKmskw_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPOPCNTW
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x54 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16
+IFORM:       VPOPCNTW_ZMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHUFBITQMB (VPSHUFBITQMB-128-1)
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x8F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_XMMu64_XMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x8F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=XMM_N3():r:dq:u64 MEM0:r:dq:u8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_XMMu64_MEMu8_AVX512
+}
+
+
+# EMITTING VPSHUFBITQMB (VPSHUFBITQMB-256-1)
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x8F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_YMMu64_YMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x8F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=YMM_N3():r:qq:u64 MEM0:r:qq:u8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_YMMu64_MEMu8_AVX512
+}
+
+
+# EMITTING VPSHUFBITQMB (VPSHUFBITQMB-512-1)
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:     EVV 0x8F V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0    ZEROING=0
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_ZMMu64_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VPSHUFBITQMB
+CPL:         3
+CATEGORY:    AVX512_BITALG
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_BITALG_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:     EVV 0x8F V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ZEROING=0  ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=MASK_R():w:mskw REG1=MASK1():r:mskw REG2=ZMM_N3():r:zu64 MEM0:r:zd:u8
+IFORM:       VPSHUFBITQMB_MASKmskw_MASKmskw_ZMMu64_MEMu8_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/vbmi2/vbmi2-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-128-1)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:u8 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u8
+IFORM:       VPCOMPRESSB_MEMu8_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-128-2)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u8
+IFORM:       VPCOMPRESSB_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-256-1)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:u8 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u8
+IFORM:       VPCOMPRESSB_MEMu8_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-256-2)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u8
+IFORM:       VPCOMPRESSB_YMMu8_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-512-1)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ZEROING=0  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:u8 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu8
+IFORM:       VPCOMPRESSB_MEMu8_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSB (VPCOMPRESSB-512-2)
+{
+ICLASS:      VPCOMPRESSB
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu8
+IFORM:       VPCOMPRESSB_ZMMu8_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-128-1)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:dq:u16 REG0=MASK1():r:mskw REG1=XMM_R3():r:dq:u16
+IFORM:       VPCOMPRESSW_MEMu16_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-128-2)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_B3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_R3():r:dq:u16
+IFORM:       VPCOMPRESSW_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-256-1)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:qq:u16 REG0=MASK1():r:mskw REG1=YMM_R3():r:qq:u16
+IFORM:       VPCOMPRESSW_MEMu16_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-256-2)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_B3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_R3():r:qq:u16
+IFORM:       VPCOMPRESSW_YMMu16_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-512-1)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x63 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ZEROING=0  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    MEM0:w:zd:u16 REG0=MASK1():r:mskw REG1=ZMM_R3():r:zu16
+IFORM:       VPCOMPRESSW_MEMu16_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPCOMPRESSW (VPCOMPRESSW-512-2)
+{
+ICLASS:      VPCOMPRESSW
+CPL:         3
+CATEGORY:    COMPRESS
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x63 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_B3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_R3():r:zu16
+IFORM:       VPCOMPRESSW_ZMMu16_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-128-1)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0  NOEVSR  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u8
+IFORM:       VPEXPANDB_XMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-128-2)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u8
+IFORM:       VPEXPANDB_XMMu8_MASKmskw_XMMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-256-1)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0  NOEVSR  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u8
+IFORM:       VPEXPANDB_YMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-256-2)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u8
+IFORM:       VPEXPANDB_YMMu8_MASKmskw_YMMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-512-1)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0  NOEVSR  ESIZE_8_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u8
+IFORM:       VPEXPANDB_ZMMu8_MASKmskw_MEMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDB (VPEXPANDB-512-2)
+{
+ICLASS:      VPEXPANDB
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu8
+IFORM:       VPEXPANDB_ZMMu8_MASKmskw_ZMMu8_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-128-1)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1  NOEVSR  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:dq:u16
+IFORM:       VPEXPANDW_XMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-128-2)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u16
+IFORM:       VPEXPANDW_XMMu16_MASKmskw_XMMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-256-1)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1  NOEVSR  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:qq:u16
+IFORM:       VPEXPANDW_YMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-256-2)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u16
+IFORM:       VPEXPANDW_YMMu16_MASKmskw_YMMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-512-1)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_GSCAT MASK_VARIABLE_MEMOP
+PATTERN:    EVV 0x62 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1  NOEVSR  ESIZE_16_BITS() NELEM_GSCAT()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:zd:u16
+IFORM:       VPEXPANDW_ZMMu16_MASKmskw_MEMu16_AVX512
+}
+
+
+# EMITTING VPEXPANDW (VPEXPANDW-512-2)
+{
+ICLASS:      VPEXPANDW
+CPL:         3
+CATEGORY:    EXPAND
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x62 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1  NOEVSR
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_B3():r:zu16
+IFORM:       VPEXPANDW_ZMMu16_MASKmskw_ZMMu16_AVX512
+}
+
+
+# EMITTING VPSHLDD (VPSHLDD-128-1)
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSHLDD_XMMu32_MASKmskw_XMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDD_XMMu32_MASKmskw_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDD (VPSHLDD-256-1)
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSHLDD_YMMu32_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDD_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDD (VPSHLDD-512-1)
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSHLDD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDD_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDQ (VPSHLDQ-128-1)
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPSHLDQ_XMMu64_MASKmskw_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDQ_XMMu64_MASKmskw_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDQ (VPSHLDQ-256-1)
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPSHLDQ_YMMu64_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDQ_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDQ (VPSHLDQ-512-1)
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPSHLDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHLDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDVD (VPSHLDVD-128-1)
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSHLDVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHLDVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHLDVD (VPSHLDVD-256-1)
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSHLDVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHLDVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHLDVD (VPSHLDVD-512-1)
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSHLDVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHLDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHLDVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHLDVQ (VPSHLDVQ-128-1)
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSHLDVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHLDVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHLDVQ (VPSHLDVQ-256-1)
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSHLDVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHLDVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHLDVQ (VPSHLDVQ-512-1)
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x71 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSHLDVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHLDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x71 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHLDVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHLDVW (VPSHLDVW-128-1)
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSHLDVW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSHLDVW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHLDVW (VPSHLDVW-256-1)
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSHLDVW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSHLDVW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHLDVW (VPSHLDVW-512-1)
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSHLDVW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHLDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSHLDVW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHLDW (VPSHLDW-128-1)
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSHLDW_XMMu16_MASKmskw_XMMu16_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSHLDW_XMMu16_MASKmskw_XMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDW (VPSHLDW-256-1)
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSHLDW_YMMu16_MASKmskw_YMMu16_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSHLDW_YMMu16_MASKmskw_YMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHLDW (VPSHLDW-512-1)
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x70 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSHLDW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHLDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x70 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSHLDW_ZMMu16_MASKmskw_ZMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDD (VPSHRDD-128-1)
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32 IMM0:r:b
+IFORM:       VPSHRDD_XMMu32_MASKmskw_XMMu32_XMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDD_XMMu32_MASKmskw_XMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDD (VPSHRDD-256-1)
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32 IMM0:r:b
+IFORM:       VPSHRDD_YMMu32_MASKmskw_YMMu32_YMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDD_YMMu32_MASKmskw_YMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDD (VPSHRDD-512-1)
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32 IMM0:r:b
+IFORM:       VPSHRDD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0   UIMM8()  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDD_ZMMu32_MASKmskw_ZMMu32_MEMu32_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDQ (VPSHRDQ-128-1)
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPSHRDQ_XMMu64_MASKmskw_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDQ_XMMu64_MASKmskw_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDQ (VPSHRDQ-256-1)
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPSHRDQ_YMMu64_MASKmskw_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDQ_YMMu64_MASKmskw_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDQ (VPSHRDQ-512-1)
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPSHRDQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VPSHRDQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDVD (VPSHRDVD-128-1)
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 REG3=XMM_B3():r:dq:u32
+IFORM:       VPSHRDVD_XMMu32_MASKmskw_XMMu32_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHRDVD_XMMu32_MASKmskw_XMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHRDVD (VPSHRDVD-256-1)
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 REG3=YMM_B3():r:qq:u32
+IFORM:       VPSHRDVD_YMMu32_MASKmskw_YMMu32_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHRDVD_YMMu32_MASKmskw_YMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHRDVD (VPSHRDVD-512-1)
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 REG3=ZMM_B3():r:zu32
+IFORM:       VPSHRDVD_ZMMu32_MASKmskw_ZMMu32_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPSHRDVD
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu32 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPSHRDVD_ZMMu32_MASKmskw_ZMMu32_MEMu32_AVX512
+}
+
+
+# EMITTING VPSHRDVQ (VPSHRDVQ-128-1)
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 REG3=XMM_B3():r:dq:u64
+IFORM:       VPSHRDVQ_XMMu64_MASKmskw_XMMu64_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHRDVQ_XMMu64_MASKmskw_XMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHRDVQ (VPSHRDVQ-256-1)
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 REG3=YMM_B3():r:qq:u64
+IFORM:       VPSHRDVQ_YMMu64_MASKmskw_YMMu64_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHRDVQ_YMMu64_MASKmskw_YMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHRDVQ (VPSHRDVQ-512-1)
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x73 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 REG3=ZMM_B3():r:zu64
+IFORM:       VPSHRDVQ_ZMMu64_MASKmskw_ZMMu64_ZMMu64_AVX512
+}
+
+{
+ICLASS:      VPSHRDVQ
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x73 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1    ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zu64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu64 MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPSHRDVQ_ZMMu64_MASKmskw_ZMMu64_MEMu64_AVX512
+}
+
+
+# EMITTING VPSHRDVW (VPSHRDVW-128-1)
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16
+IFORM:       VPSHRDVW_XMMu16_MASKmskw_XMMu16_XMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():rw:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16
+IFORM:       VPSHRDVW_XMMu16_MASKmskw_XMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHRDVW (VPSHRDVW-256-1)
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16
+IFORM:       VPSHRDVW_YMMu16_MASKmskw_YMMu16_YMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():rw:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16
+IFORM:       VPSHRDVW_YMMu16_MASKmskw_YMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHRDVW (VPSHRDVW-512-1)
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16
+IFORM:       VPSHRDVW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_AVX512
+}
+
+{
+ICLASS:      VPSHRDVW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1    ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():rw:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16
+IFORM:       VPSHRDVW_ZMMu16_MASKmskw_ZMMu16_MEMu16_AVX512
+}
+
+
+# EMITTING VPSHRDW (VPSHRDW-128-1)
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 REG3=XMM_B3():r:dq:u16 IMM0:r:b
+IFORM:       VPSHRDW_XMMu16_MASKmskw_XMMu16_XMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u16 MEM0:r:dq:u16 IMM0:r:b
+IFORM:       VPSHRDW_XMMu16_MASKmskw_XMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDW (VPSHRDW-256-1)
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 REG3=YMM_B3():r:qq:u16 IMM0:r:b
+IFORM:       VPSHRDW_YMMu16_MASKmskw_YMMu16_YMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u16 MEM0:r:qq:u16 IMM0:r:b
+IFORM:       VPSHRDW_YMMu16_MASKmskw_YMMu16_MEMu16_IMM8_AVX512
+}
+
+
+# EMITTING VPSHRDW (VPSHRDW-512-1)
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x72 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 REG3=ZMM_B3():r:zu16 IMM0:r:b
+IFORM:       VPSHRDW_ZMMu16_MASKmskw_ZMMu16_ZMMu16_IMM8_AVX512
+}
+
+{
+ICLASS:      VPSHRDW
+CPL:         3
+CATEGORY:    VBMI2
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VBMI2_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0x72 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W1   UIMM8()  ESIZE_16_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu16 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu16 MEM0:r:zd:u16 IMM0:r:b
+IFORM:       VPSHRDW_ZMMu16_MASKmskw_ZMMu16_MEMu16_IMM8_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/vnni/vnni-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPDPBUSD (VPDPBUSD-128-1)
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x50 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u32
+IFORM:       VPDPBUSD_XMMi32_MASKmskw_XMMu8_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSD_XMMi32_MASKmskw_XMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPBUSD (VPDPBUSD-256-1)
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x50 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u32
+IFORM:       VPDPBUSD_YMMi32_MASKmskw_YMMu8_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSD_YMMi32_MASKmskw_YMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPBUSD (VPDPBUSD-512-1)
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x50 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu32
+IFORM:       VPDPBUSD_ZMMi32_MASKmskw_ZMMu8_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x50 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSD_ZMMi32_MASKmskw_ZMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPBUSDS (VPDPBUSDS-128-1)
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x51 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u32
+IFORM:       VPDPBUSDS_XMMi32_MASKmskw_XMMu8_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSDS_XMMi32_MASKmskw_XMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPBUSDS (VPDPBUSDS-256-1)
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x51 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u32
+IFORM:       VPDPBUSDS_YMMi32_MASKmskw_YMMu8_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSDS_YMMi32_MASKmskw_YMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPBUSDS (VPDPBUSDS-512-1)
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x51 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu32
+IFORM:       VPDPBUSDS_ZMMi32_MASKmskw_ZMMu8_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPBUSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x51 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPBUSDS_ZMMi32_MASKmskw_ZMMu8_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSD (VPDPWSSD-128-1)
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x52 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:u32
+IFORM:       VPDPWSSD_XMMi32_MASKmskw_XMMi16_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x52 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSD_XMMi32_MASKmskw_XMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSD (VPDPWSSD-256-1)
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x52 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:u32
+IFORM:       VPDPWSSD_YMMi32_MASKmskw_YMMi16_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x52 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSD_YMMi32_MASKmskw_YMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSD (VPDPWSSD-512-1)
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x52 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zu32
+IFORM:       VPDPWSSD_ZMMi32_MASKmskw_ZMMi16_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x52 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSD_ZMMi32_MASKmskw_ZMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSDS (VPDPWSSDS-128-1)
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x53 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 REG3=XMM_B3():r:dq:u32
+IFORM:       VPDPWSSDS_XMMi32_MASKmskw_XMMi16_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x53 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():rw:dq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:i16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSDS_XMMi32_MASKmskw_XMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSDS (VPDPWSSDS-256-1)
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x53 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 REG3=YMM_B3():r:qq:u32
+IFORM:       VPDPWSSDS_YMMi32_MASKmskw_YMMi16_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x53 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():rw:qq:i32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:i16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSDS_YMMi32_MASKmskw_YMMi16_MEMu32_AVX512
+}
+
+
+# EMITTING VPDPWSSDS (VPDPWSSDS-512-1)
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x53 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 REG3=ZMM_B3():r:zu32
+IFORM:       VPDPWSSDS_ZMMi32_MASKmskw_ZMMi16_ZMMu32_AVX512
+}
+
+{
+ICLASS:      VPDPWSSDS
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VNNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x53 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W0    ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():rw:zi32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zi16 MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPDPWSSDS_ZMMi32_MASKmskw_ZMMi16_MEMu32_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/gfni-vaes-vpcl/gfni-sse-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+INSTRUCTIONS()::
+# EMITTING GF2P8AFFINEINVQB (GF2P8AFFINEINVQB-N/A-1)
+{
+ICLASS:      GF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x3A 0xCF MOD[0b11] MOD=3  REG[rrr] RM[nnn]  osz_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:u8 REG1=XMM_B():r:dq:u64 IMM0:r:b
+IFORM:       GF2P8AFFINEINVQB_XMMu8_XMMu64_IMM8
+}
+
+{
+ICLASS:      GF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x3A 0xCF MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:u8 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       GF2P8AFFINEINVQB_XMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING GF2P8AFFINEQB (GF2P8AFFINEQB-N/A-1)
+{
+ICLASS:      GF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x3A 0xCE MOD[0b11] MOD=3  REG[rrr] RM[nnn]  osz_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:u8 REG1=XMM_B():r:dq:u64 IMM0:r:b
+IFORM:       GF2P8AFFINEQB_XMMu8_XMMu64_IMM8
+}
+
+{
+ICLASS:      GF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x3A 0xCE MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix     UIMM8()
+OPERANDS:    REG0=XMM_R():rw:dq:u8 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       GF2P8AFFINEQB_XMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING GF2P8MULB (GF2P8MULB-N/A-1)
+{
+ICLASS:      GF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCF MOD[0b11] MOD=3  REG[rrr] RM[nnn]  osz_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:u8 REG1=XMM_B():r:dq:u8
+IFORM:       GF2P8MULB_XMMu8_XMMu8
+}
+
+{
+ICLASS:      GF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     GFNI
+EXCEPTIONS:     E4
+REAL_OPCODE: Y
+PATTERN:     0x0F 0x38 0xCF MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  osz_refining_prefix
+OPERANDS:    REG0=XMM_R():rw:dq:u8 MEM0:r:dq:u8
+IFORM:       GF2P8MULB_XMMu8_MEMu8
+}
+
+
+
+
+###FILE: ./datafiles/gfni-vaes-vpcl/gfni-evex-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VGF2P8AFFINEINVQB (VGF2P8AFFINEINVQB-128-1)
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_XMMu8_MASKmskw_XMMu8_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCF V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_XMMu8_MASKmskw_XMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8AFFINEINVQB (VGF2P8AFFINEINVQB-256-1)
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_YMMu8_MASKmskw_YMMu8_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCF V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_YMMu8_MASKmskw_YMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8AFFINEINVQB (VGF2P8AFFINEINVQB-512-1)
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_ZMMu8_MASKmskw_ZMMu8_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCF V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_ZMMu8_MASKmskw_ZMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8AFFINEQB (VGF2P8AFFINEQB-128-1)
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCE V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_XMMu8_MASKmskw_XMMu8_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCE V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_XMMu8_MASKmskw_XMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8AFFINEQB (VGF2P8AFFINEQB-256-1)
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCE V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_YMMu8_MASKmskw_YMMu8_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCE V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_YMMu8_MASKmskw_YMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8AFFINEQB (VGF2P8AFFINEQB-512-1)
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCE V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W1   UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_ZMMu8_MASKmskw_ZMMu8_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0xCE V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL512  W1   UIMM8()  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:vv:u64:TXT=BCASTSTR IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_ZMMu8_MASKmskw_ZMMu8_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VGF2P8MULB (VGF2P8MULB-128-1)
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 REG3=XMM_B3():r:dq:u8
+IFORM:       VGF2P8MULB_XMMu8_MASKmskw_XMMu8_XMMu8_AVX512
+}
+
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xCF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_N3():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VGF2P8MULB_XMMu8_MASKmskw_XMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VGF2P8MULB (VGF2P8MULB-256-1)
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 REG3=YMM_B3():r:qq:u8
+IFORM:       VGF2P8MULB_YMMu8_MASKmskw_YMMu8_YMMu8_AVX512
+}
+
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xCF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_N3():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VGF2P8MULB_YMMu8_MASKmskw_YMMu8_MEMu8_AVX512
+}
+
+
+# EMITTING VGF2P8MULB (VGF2P8MULB-512-1)
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0xCF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512  W0
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 REG3=ZMM_B3():r:zu8
+IFORM:       VGF2P8MULB_ZMMu8_MASKmskw_ZMMu8_ZMMu8_AVX512
+}
+
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_GFNI_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULLMEM
+PATTERN:    EVV 0xCF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512  W0    ESIZE_8_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu8 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=ZMM_N3():r:zu8 MEM0:r:zd:u8
+IFORM:       VGF2P8MULB_ZMMu8_MASKmskw_ZMMu8_MEMu8_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/gfni-vaes-vpcl/gfni-vex-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+AVX_INSTRUCTIONS()::
+# EMITTING VGF2P8AFFINEINVQB (VGF2P8AFFINEINVQB-128-2)
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_XMMu8_XMMu8_XMMu64_IMM8
+}
+
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_XMMu8_XMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING VGF2P8AFFINEINVQB (VGF2P8AFFINEINVQB-256-2)
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_YMMu8_YMMu8_YMMu64_IMM8
+}
+
+{
+ICLASS:      VGF2P8AFFINEINVQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEINVQB_YMMu8_YMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING VGF2P8AFFINEQB (VGF2P8AFFINEQB-128-2)
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCE V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_XMMu8_XMMu8_XMMu64_IMM8
+}
+
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCE V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1   UIMM8()
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_XMMu8_XMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING VGF2P8AFFINEQB (VGF2P8AFFINEQB-256-2)
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCE V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_YMMu8_YMMu8_YMMu64_IMM8
+}
+
+{
+ICLASS:      VGF2P8AFFINEQB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCE V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1   UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u64 IMM0:r:b
+IFORM:       VGF2P8AFFINEQB_YMMu8_YMMu8_MEMu64_IMM8
+}
+
+
+# EMITTING VGF2P8MULB (VGF2P8MULB-128-2)
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL128  W0
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 REG2=XMM_B():r:dq:u8
+IFORM:       VGF2P8MULB_XMMu8_XMMu8_XMMu8
+}
+
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0
+OPERANDS:    REG0=XMM_R():w:dq:u8 REG1=XMM_N():r:dq:u8 MEM0:r:dq:u8
+IFORM:       VGF2P8MULB_XMMu8_XMMu8_MEMu8
+}
+
+
+# EMITTING VGF2P8MULB (VGF2P8MULB-256-2)
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256  W0
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 REG2=YMM_B():r:qq:u8
+IFORM:       VGF2P8MULB_YMMu8_YMMu8_YMMu8
+}
+
+{
+ICLASS:      VGF2P8MULB
+CPL:         3
+CATEGORY:    GFNI
+EXTENSION:   GFNI
+ISA_SET:     AVX_GFNI
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xCF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0
+OPERANDS:    REG0=YMM_R():w:qq:u8 REG1=YMM_N():r:qq:u8 MEM0:r:qq:u8
+IFORM:       VGF2P8MULB_YMMu8_YMMu8_MEMu8
+}
+
+
+
+
+###FILE: ./datafiles/gfni-vaes-vpcl/vaes-evex-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VAESDEC (VAESDEC-128-1)
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 REG2=XMM_B3():r:dq:u128
+IFORM:       VAESDEC_XMMu128_XMMu128_XMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 MEM0:r:dq:u128
+IFORM:       VAESDEC_XMMu128_XMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESDEC (VAESDEC-256-1)
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 REG2=YMM_B3():r:qq:u128
+IFORM:       VAESDEC_YMMu128_YMMu128_YMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESDEC_YMMu128_YMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESDEC (VAESDEC-512-1)
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDE V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 REG2=ZMM_B3():r:zu128
+IFORM:       VAESDEC_ZMMu128_ZMMu128_ZMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 MEM0:r:zd:u128
+IFORM:       VAESDEC_ZMMu128_ZMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESDECLAST (VAESDECLAST-128-1)
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 REG2=XMM_B3():r:dq:u128
+IFORM:       VAESDECLAST_XMMu128_XMMu128_XMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 MEM0:r:dq:u128
+IFORM:       VAESDECLAST_XMMu128_XMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESDECLAST (VAESDECLAST-256-1)
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 REG2=YMM_B3():r:qq:u128
+IFORM:       VAESDECLAST_YMMu128_YMMu128_YMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESDECLAST_YMMu128_YMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESDECLAST (VAESDECLAST-512-1)
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDF V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 REG2=ZMM_B3():r:zu128
+IFORM:       VAESDECLAST_ZMMu128_ZMMu128_ZMMu128_AVX512
+}
+
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 MEM0:r:zd:u128
+IFORM:       VAESDECLAST_ZMMu128_ZMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENC (VAESENC-128-1)
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 REG2=XMM_B3():r:dq:u128
+IFORM:       VAESENC_XMMu128_XMMu128_XMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 MEM0:r:dq:u128
+IFORM:       VAESENC_XMMu128_XMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENC (VAESENC-256-1)
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 REG2=YMM_B3():r:qq:u128
+IFORM:       VAESENC_YMMu128_YMMu128_YMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESENC_YMMu128_YMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENC (VAESENC-512-1)
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDC V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 REG2=ZMM_B3():r:zu128
+IFORM:       VAESENC_ZMMu128_ZMMu128_ZMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 MEM0:r:zd:u128
+IFORM:       VAESENC_ZMMu128_ZMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENCLAST (VAESENCLAST-128-1)
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 REG2=XMM_B3():r:dq:u128
+IFORM:       VAESENCLAST_XMMu128_XMMu128_XMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u128 MEM0:r:dq:u128
+IFORM:       VAESENCLAST_XMMu128_XMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENCLAST (VAESENCLAST-256-1)
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 REG2=YMM_B3():r:qq:u128
+IFORM:       VAESENCLAST_YMMu128_YMMu128_YMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESENCLAST_YMMu128_YMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VAESENCLAST (VAESENCLAST-512-1)
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0xDD V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 REG2=ZMM_B3():r:zu128
+IFORM:       VAESENCLAST_ZMMu128_ZMMu128_ZMMu128_AVX512
+}
+
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VAES_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0xDD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0  ESIZE_128_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu128 MEM0:r:zd:u128
+IFORM:       VAESENCLAST_ZMMu128_ZMMu128_MEMu128_AVX512
+}
+
+
+# EMITTING VPCLMULQDQ (VPCLMULQDQ-128-1)
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0x44 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u64 REG2=XMM_B3():r:dq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_XMMu128_XMMu64_XMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x44 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL128      ZEROING=0 MASK=0 UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=XMM_R3():w:dq:u128 REG1=XMM_N3():r:dq:u64 MEM0:r:dq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_XMMu128_XMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPCLMULQDQ (VPCLMULQDQ-256-1)
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0x44 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u64 REG2=YMM_B3():r:qq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_YMMu128_YMMu64_YMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x44 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL256      ZEROING=0 MASK=0 UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=YMM_R3():w:qq:u128 REG1=YMM_N3():r:qq:u64 MEM0:r:qq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_YMMu128_YMMu64_MEMu64_IMM8_AVX512
+}
+
+
+# EMITTING VPCLMULQDQ (VPCLMULQDQ-512-1)
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+PATTERN:    EVV 0x44 V66 V0F3A MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL512      ZEROING=0 MASK=0 UIMM8()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu64 REG2=ZMM_B3():r:zu64 IMM0:r:b
+IFORM:       VPCLMULQDQ_ZMMu128_ZMMu64_ZMMu64_IMM8_AVX512
+}
+
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPCLMULQDQ_512
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  DISP8_FULLMEM
+PATTERN:    EVV 0x44 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn] BCRC=0 MODRM()  VL512      ZEROING=0 MASK=0 UIMM8()  ESIZE_64_BITS() NELEM_FULLMEM()
+OPERANDS:    REG0=ZMM_R3():w:zu128 REG1=ZMM_N3():r:zu64 MEM0:r:zd:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_ZMMu128_ZMMu64_MEMu64_IMM8_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/gfni-vaes-vpcl/vaes-vex-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+AVX_INSTRUCTIONS()::
+# EMITTING VAESDEC (VAESDEC-256-2)
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDE V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 REG2=YMM_B():r:qq:u128
+IFORM:       VAESDEC_YMMu128_YMMu128_YMMu128
+}
+
+{
+ICLASS:      VAESDEC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDE V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESDEC_YMMu128_YMMu128_MEMu128
+}
+
+
+# EMITTING VAESDECLAST (VAESDECLAST-256-2)
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDF V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 REG2=YMM_B():r:qq:u128
+IFORM:       VAESDECLAST_YMMu128_YMMu128_YMMu128
+}
+
+{
+ICLASS:      VAESDECLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDF V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESDECLAST_YMMu128_YMMu128_MEMu128
+}
+
+
+# EMITTING VAESENC (VAESENC-256-2)
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDC V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 REG2=YMM_B():r:qq:u128
+IFORM:       VAESENC_YMMu128_YMMu128_YMMu128
+}
+
+{
+ICLASS:      VAESENC
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDC V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESENC_YMMu128_YMMu128_MEMu128
+}
+
+
+# EMITTING VAESENCLAST (VAESENCLAST-256-2)
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDD V66 V0F38 MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 REG2=YMM_B():r:qq:u128
+IFORM:       VAESENCLAST_YMMu128_YMMu128_YMMu128
+}
+
+{
+ICLASS:      VAESENCLAST
+CPL:         3
+CATEGORY:    VAES
+EXTENSION:   VAES
+ISA_SET:     VAES
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0xDD V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u128 MEM0:r:qq:u128
+IFORM:       VAESENCLAST_YMMu128_YMMu128_MEMu128
+}
+
+
+# EMITTING VPCLMULQDQ (VPCLMULQDQ-256-2)
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   VPCLMULQDQ
+ISA_SET:     VPCLMULQDQ
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0x44 V66 V0F3A MOD[0b11] MOD=3  REG[rrr] RM[nnn]  VL256     UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u64 REG2=YMM_B():r:qq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_YMMu128_YMMu64_YMMu64_IMM8
+}
+
+{
+ICLASS:      VPCLMULQDQ
+CPL:         3
+CATEGORY:    VPCLMULQDQ
+EXTENSION:   VPCLMULQDQ
+ISA_SET:     VPCLMULQDQ
+EXCEPTIONS:     avx-type-4
+REAL_OPCODE: Y
+PATTERN:    VV1 0x44 V66 V0F3A MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256     UIMM8()
+OPERANDS:    REG0=YMM_R():w:qq:u128 REG1=YMM_N():r:qq:u64 MEM0:r:qq:u64 IMM0:r:b
+IFORM:       VPCLMULQDQ_YMMu128_YMMu64_MEMu64_IMM8
+}
+
+
+
+
+###FILE: ./datafiles/vpopcntdq-vl/vpopcntdq-vl-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+EVEX_INSTRUCTIONS()::
+# EMITTING VPOPCNTD (VPOPCNTD-128-1)
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W0  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u32
+IFORM:       VPOPCNTD_XMMu32_MASKmskw_XMMu32_AVX512
+}
+
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPOPCNTD_XMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPOPCNTD (VPOPCNTD-256-1)
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W0  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u32
+IFORM:       VPOPCNTD_YMMu32_MASKmskw_YMMu32_AVX512
+}
+
+{
+ICLASS:      VPOPCNTD
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W0  NOEVSR  ESIZE_32_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u32 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u32:TXT=BCASTSTR
+IFORM:       VPOPCNTD_YMMu32_MASKmskw_MEMu32_AVX512
+}
+
+
+# EMITTING VPOPCNTQ (VPOPCNTQ-128-1)
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL128  W1  NOEVSR
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=XMM_B3():r:dq:u64
+IFORM:       VPOPCNTQ_XMMu64_MASKmskw_XMMu64_AVX512
+}
+
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_128
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL128  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=XMM_R3():w:dq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPOPCNTQ_XMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+# EMITTING VPOPCNTQ (VPOPCNTQ-256-1)
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MASKOP_EVEX
+PATTERN:    EVV 0x55 V66 V0F38 MOD[0b11] MOD=3 BCRC=0 REG[rrr] RM[nnn]  VL256  W1  NOEVSR
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR REG2=YMM_B3():r:qq:u64
+IFORM:       VPOPCNTQ_YMMu64_MASKmskw_YMMu64_AVX512
+}
+
+{
+ICLASS:      VPOPCNTQ
+CPL:         3
+CATEGORY:    AVX512
+EXTENSION:   AVX512EVEX
+ISA_SET:     AVX512_VPOPCNTDQ_256
+EXCEPTIONS:     AVX512-E4
+REAL_OPCODE: Y
+ATTRIBUTES:  MEMORY_FAULT_SUPPRESSION MASKOP_EVEX DISP8_FULL BROADCAST_ENABLED
+PATTERN:    EVV 0x55 V66 V0F38 MOD[mm] MOD!=3 REG[rrr] RM[nnn]  MODRM()  VL256  W1  NOEVSR  ESIZE_64_BITS() NELEM_FULL()
+OPERANDS:    REG0=YMM_R3():w:qq:u64 REG1=MASK1():r:mskw:TXT=ZEROSTR MEM0:r:vv:u64:TXT=BCASTSTR
+IFORM:       VPOPCNTQ_YMMu64_MASKmskw_MEMu64_AVX512
+}
+
+
+
+
+###FILE: ./datafiles/rdpid/rdpid-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2017 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#    ***** GENERATED FILE -- DO NOT EDIT! *****
+#
+#
+#
+INSTRUCTIONS()::
+# EMITTING RDPID (RDPID-N/A-1-32)
+{
+ICLASS:      RDPID
+CPL:         3
+CATEGORY:    RDPID
+EXTENSION:   RDPID
+ISA_SET:     RDPID
+REAL_OPCODE: N
+PATTERN:     0x0F 0xC7 MOD[0b11] MOD=3  REG[0b111] RM[nnn]  f3_refining_prefix    not64
+OPERANDS:    REG0=GPR32_B():r:d:u32 REG1=XED_REG_TSCAUX:r:SUPP:d:u32
+IFORM:       RDPID_GPR32u32
+}
+
+
+# EMITTING RDPID (RDPID-N/A-1-64)
+{
+ICLASS:      RDPID
+CPL:         3
+CATEGORY:    RDPID
+EXTENSION:   RDPID
+ISA_SET:     RDPID
+REAL_OPCODE: N
+PATTERN:     0x0F 0xC7 MOD[0b11] MOD=3  REG[0b111] RM[nnn]  f3_refining_prefix   mode64
+OPERANDS:    REG0=GPR64_B():r:q:u64 REG1=XED_REG_TSCAUX:r:SUPP:d:u32
+IFORM:       RDPID_GPR64u64
+}
+
+
+
+
+###FILE: ./datafiles/pt/intelpt-isa.xed.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+
+INSTRUCTIONS()::
+{
+ICLASS    : PTWRITE
+CPL       : 3
+CATEGORY  : PT
+EXTENSION : PT
+PATTERN   : 0x0F 0xAE MOD[0b11] MOD=3 REG[0b100]  RM[nnn] f3_refining_prefix  no66_prefix
+OPERANDS  : REG0=GPRy_B():r
+PATTERN   : 0x0F 0xAE MOD[mm]   MOD!=3 REG[0b100] RM[nnn] f3_refining_prefix no66_prefix MODRM()
+OPERANDS  : MEM0:r:y
+
+}
diff --git a/x86/x86avxgen/testdata/xedpath/all-element-types.txt b/x86/x86avxgen/testdata/xedpath/all-element-types.txt
new file mode 100644
index 0000000..876ddbd
--- /dev/null
+++ b/x86/x86avxgen/testdata/xedpath/all-element-types.txt
@@ -0,0 +1,69 @@
+
+
+###FILE: ./datafiles/xed-operand-types.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#
+#XTYPE       TYPE   BITS-PER-ELEM
+#
+var      VARIABLE    0  # instruction must set NELEM and  ELEMENT_SIZE
+struct     STRUCT    0  # many elements of different widths
+int           INT    0  # one element, all the bits, width varies
+uint         UINT    0  # one element, all the bits, width varies
+#
+i1            INT    1
+i8            INT    8
+i16           INT   16
+i32           INT   32
+i64           INT   64
+u8           UINT    8
+u16          UINT   16
+u32          UINT   32
+u64          UINT   64
+u128         UINT  128
+u256         UINT  256
+f32        SINGLE   32
+f64        DOUBLE   64
+f80    LONGDOUBLE   80
+b80       LONGBCD   80
+
+
+###FILE: ./datafiles/ivbavx/fp16-operand-types.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#XTYPE       TYPE   BITS-PER-ELEM
+f16        FLOAT16  16
+
diff --git a/x86/x86avxgen/testdata/xedpath/all-state.txt b/x86/x86avxgen/testdata/xedpath/all-state.txt
new file mode 100644
index 0000000..b2fc642
--- /dev/null
+++ b/x86/x86avxgen/testdata/xedpath/all-state.txt
@@ -0,0 +1,341 @@
+
+
+###FILE: ./datafiles/xed-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+###########################################################################
+## file: xed-state-bits.txt
+###########################################################################
+
+# These are just shorthand for some conditions or captures.
+# Simple macro replacement
+#all_modes             ALL_MODES=1
+not64                  MODE!=2
+mode64                 MODE=2
+mode32                 MODE=1
+mode16                 MODE=0
+
+# effective addressing mode
+eanot16               EASZ!=1
+eamode16              EASZ=1
+eamode32              EASZ=2
+eamode64              EASZ=3
+
+# stack addressing mode
+smode16               SMODE=0
+smode32               SMODE=1
+smode64               SMODE=2
+
+eosz8                 EOSZ=0
+eosz16                EOSZ=1
+not_eosz16            EOSZ!=1
+eosz32                EOSZ=2
+eosz64                EOSZ=3
+eosznot64             EOSZ!=3
+
+
+# for OD expansion in graph partitioning FIXME
+mod0                MOD=0
+mod1                MOD=1
+mod2                MOD=2
+mod3                MOD=3
+
+rex_reqd                REX=1
+no_rex                  REX=0
+reset_rex              REX=0 REXW=0 REXB=0 REXR=0 REXX=0
+
+rexb_prefix             REXB=1
+rexx_prefix             REXX=1
+rexr_prefix             REXR=1
+
+# 2013-09-25 FIXME: we were inconsistent. some things use W0/W1, some
+# use the more verbose form. We should converge on W0/W1.
+#
+rexw_prefix            REXW=1 SKIP_OSZ=1
+norexw_prefix          REXW=0 SKIP_OSZ=1
+W1                     REXW=1 SKIP_OSZ=1
+W0                     REXW=0 SKIP_OSZ=1
+
+norexb_prefix             REXB=0
+norexx_prefix             REXX=0
+norexr_prefix             REXR=0
+############################################################3333
+f2_prefix              REP=2  # REPNZ, REPNE
+f3_prefix              REP=3  # REPZ,  REPE
+repne                  REP=2
+repe                   REP=3
+norep                  REP=0
+66_prefix              OSZ=1
+nof3_prefix            REP!=3
+no66_prefix            OSZ=0
+not_refining           REP=0 # dummy setting for state values 2007-08-06 FIXME
+refining_f2            REP=2
+refining_f3            REP=3
+not_refining_f3        REP!=3 # for pause vs xchg
+no_refining_prefix     REP=0 OSZ=0  # critical:REP must  be first for decoding partitioning
+osz_refining_prefix    REP=0 OSZ=1
+f2_refining_prefix     REP=2
+f3_refining_prefix     REP=3
+
+no67_prefix             ASZ=0
+67_prefix               ASZ=1
+
+lock_prefix             LOCK=1
+nolock_prefix           LOCK=0
+
+default_ds              DEFAULT_SEG=0
+default_ss              DEFAULT_SEG=1
+default_es              DEFAULT_SEG=2 # for string ops
+no_seg_prefix           SEG_OVD=0
+some_seg_prefix         SEG_OVD!=0
+cs_prefix               SEG_OVD=1
+ds_prefix               SEG_OVD=2
+es_prefix               SEG_OVD=3
+fs_prefix               SEG_OVD=4
+gs_prefix               SEG_OVD=5
+ss_prefix               SEG_OVD=6
+
+# default (or not) to 64b width in 64b mode
+nrmw                   DF64=0
+df64                   DF64=1
+
+# default choice for encoder when there are multiple choices for a
+# nonterminal. The ISA is not uniquely determined for encoding so we
+# must express preferences for certain forms!
+enc                    ENCODER_PREFERRED=1
+
+# for the legacy prefix encoder, tell it to keep trying rules and not
+# return after successfully finding one that applies
+no_return              NO_RETURN=1
+
+# indicate an encoding or decoding error occurred
+error                  ERROR=XED_ERROR_GENERAL_ERROR
+
+# dummy constraint which always satisfies
+true                   DUMMY=0
+
+
+###FILE: ./datafiles/amdxop/xop-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+XMAP8  MAP=8
+XMAP9  MAP=9
+XMAPA  MAP=10
+
+XOPV   VEXVALID=3
+
+
+###FILE: ./datafiles/avx/avx-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+VL128  VL=0
+VL256  VL=1
+
+VV1    VEXVALID=1
+VV0    VEXVALID=0
+
+VMAP0  MAP=0
+V0F    MAP=1
+V0F38  MAP=2
+V0F3A  MAP=3
+
+VNP    VEX_PREFIX=0
+V66    VEX_PREFIX=1
+VF2    VEX_PREFIX=2
+VF3    VEX_PREFIX=3
+
+# No VEX-SPECIFIED-REGISTER
+NOVSR  VEXDEST3=0b1 VEXDEST210=0b111
+
+EMX_BROADCAST_1TO4_32  BCAST=10   #     128
+EMX_BROADCAST_1TO4_64  BCAST=13   #     256
+EMX_BROADCAST_1TO8_32  BCAST=3    #     256
+EMX_BROADCAST_2TO4_64  BCAST=20   #     256
+
+
+
+###FILE: ./datafiles/avxhsw/hsw-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+
+EMX_BROADCAST_1TO2_64  BCAST=11   #         128
+EMX_BROADCAST_1TO8_16   BCAST=14  #         128
+EMX_BROADCAST_1TO16_16  BCAST=15  #     256
+EMX_BROADCAST_1TO16_8   BCAST=17  #         128
+EMX_BROADCAST_1TO32_8   BCAST=18  #     256
+
+
+
+
+###FILE: /home/quasilyte/CODE/intel/xed/datafiles/knc/uisa-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+VL512 VL=2
+VLBAD VL=3 # unused VL setting to cause things not to decode.
+
+# KNC EVEX is KVV
+#
+KVV   VEXVALID=4
+
+# No VEX-SPECIFIED-REGISTER
+NOEVSR  VEXDEST3=0b1 VEXDEST210=0b111 VEXDEST4=0b0
+
+# No VEX-SPECIFIED-REGISTER for GATHERS/SCATTERS -- index reg 5th bit is VEXTDEST4
+NO_SPARSE_EVSR  VEXDEST3=0b1 VEXDEST210=0b111
+
+# These conflict w/another chip ... so if you ever build a combo
+#  model you'll have to remove these somehow.
+#
+EMX_BROADCAST_1TO16_32 BCAST=1      # 512
+EMX_BROADCAST_4TO16_32 BCAST=2      # 512
+EMX_BROADCAST_1TO8_64  BCAST=5      # 512
+EMX_BROADCAST_4TO8_64  BCAST=6      # 512
+
+
+
+###FILE: ./datafiles/avx512f/avx512-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+EVV VEXVALID=2
+
+EMX_BROADCAST_1TO16_32 BCAST=1      # 512
+EMX_BROADCAST_4TO16_32 BCAST=2      # 512
+EMX_BROADCAST_1TO8_64  BCAST=5      # 512
+EMX_BROADCAST_4TO8_64  BCAST=6      # 512
+EMX_BROADCAST_2TO16_32 BCAST=7      # 512
+EMX_BROADCAST_2TO8_64  BCAST=8      # 512
+EMX_BROADCAST_8TO16_32 BCAST=9      # 512
+EMX_BROADCAST_1TO32_16  BCAST=16  # 512
+EMX_BROADCAST_1TO64_8   BCAST=19  # 512
+# these do not show up on earlier processors
+EMX_BROADCAST_4TO8_32  BCAST=4      #     256
+EMX_BROADCAST_2TO4_32  BCAST=12   #         128
+EMX_BROADCAST_2TO8_32  BCAST=21   #     256
+EMX_BROADCAST_1TO2_32  BCAST=22   #     128
+
+
+###FILE: ./datafiles/avx512-skx/skx-state-bits.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+
+EMX_BROADCAST_1TO2_8   BCAST=23
+EMX_BROADCAST_1TO4_8   BCAST=24
+EMX_BROADCAST_1TO8_8   BCAST=25
+
+EMX_BROADCAST_1TO2_16   BCAST=26
+EMX_BROADCAST_1TO4_16   BCAST=27
diff --git a/x86/x86avxgen/testdata/xedpath/all-widths.txt b/x86/x86avxgen/testdata/xedpath/all-widths.txt
new file mode 100644
index 0000000..11b6ba7
--- /dev/null
+++ b/x86/x86avxgen/testdata/xedpath/all-widths.txt
@@ -0,0 +1,224 @@
+
+
+###FILE: ./datafiles/xed-operand-width.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+# @file xed-operand-width.txt
+
+# the default xtype can be overridden in each operand using a ":" followed by an explicit xtype
+##
+## the width defaults to bytes. But it can be bits if it has a "bits" suffix
+##
+#
+#            default
+#oc2-code    XTYPE     width16 width32 width64 (if only one width is shown, it is for all widths)
+#
+INVALID     INVALID   0
+#
+#  3 strange things:
+#
+asz         int       2 4 8  # varies with the effective address width
+ssz         int       2 4 8  # varies with the stack address width
+pseudo      struct    0      # these are for unusual registers
+pseudox87   struct    0      # these are for unusual registers
+#
+#
+#
+#1          i1        1 # FIXME: this is not used...
+a16         i16       4  # bound
+a32         i32       8  # bound
+b           u8        1
+d           i32       4
+#
+i8          i8        1
+u8          u8        1
+i16         i16       2
+u16         u16       2
+i32         i32       4
+u32         u32       4
+i64         i64       8
+u64         u64       8
+f16         f16       2  # IVB converts
+f32         f32       4
+f64         f64       8
+#
+dq          i32      16
+#
+xub        u8        16
+xuw        u16       16
+xud        u32       16
+xuq        u64       16
+x128       u128      16
+#
+xb          i8       16
+xw          i16      16
+xd          i32      16
+xq          i64      16
+#
+#
+mb          i8        8
+mw          i16       8
+md          i32       8
+mq          i64       8
+#
+m64int      i64       8
+m64real     f64       8
+mem108      struct  108
+mem14       struct   14
+mem16       struct    2
+mem16int    i16       2
+mem28       struct   28
+mem32int    i32       4
+mem32real   f32       4
+mem80dec    b80      10
+mem80real   f80      10
+f80         f80      10 # for X87 registers:
+mem94       struct   94
+mfpxenv     struct  512
+mxsave      struct  576
+mprefetch   i64      64 # made up width for prefetches
+p           struct    4    6   6
+p2          struct    4    6  10
+pd          f64      16
+ps          f32      16
+pi          i32       8
+q           i64       8
+s           struct    6    6  10
+s64         struct   10
+sd          f64       8
+si          i32       4
+ss          f32       4
+v           int       2    4   8
+y           int       4    4   8
+w           i16       2
+z           int       2    4   4
+spw8        int      16   32   0  # varies (64b invalid)         STACK POINTER WIDTH
+spw         int       2    4   8  # varies                       STACK POINTER WIDTH
+spw5        int      10   20  40  # varies (IRET approx)         STACK POINTER WIDTH
+spw3        int       6   12  24  # varies (IRET approx)         STACK POINTER WIDTH
+spw2        int       4    8  16  # varies (FAR call/ret approx) STACK POINTER WIDTH
+i1          int   1bits
+i2          int   2bits
+i3          int   3bits
+i4          int   4bits
+i5          int   5bits
+i6          int   6bits
+i7          int   7bits
+i8          int   8bits
+var         var      0  # relies on NELEM * ELEMENT_SIZE to get the number of bits.
+bnd32       u32     12  # MPX 32b BNDLDX/BNDSTX memop 3x4B
+bnd64       u64     24  # MPX 32b BNDLDX/BNDSTX memop 3x8B
+
+
+###FILE: ./datafiles/avx/avx-operand-width.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#code      XTYPE     width16 width32 width64 (if only one width is presented, it is for all widths)
+#
+qq          i32     32
+yub         u8      32
+yuw         u16     32
+yud         u32     32
+yuq         u64     32
+y128        u128    32
+
+yb          i8      32
+yw          i16     32
+yd          i32     32
+yq          i64     32
+
+yps         f32     32
+ypd         f64     32
+
+
+
+
+###FILE: ./datafiles/avx512f/avx512-operand-widths.txt
+
+#BEGIN_LEGAL
+#
+#Copyright (c) 2016 Intel Corporation
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#END_LEGAL
+#
+#code      XTYPE     width16 width32 width64 (if only one width is presented, it is for all widths)
+#
+vv          var      0 # relies on nelem * elem_size
+zv          var      0 # relies on nelem * elem_size
+
+wrd         u16     16bits
+mskw        i1      64bits # FIXME: bad name
+
+zmskw       i1      512bits
+
+zf32       f32      512bits
+zf64       f64      512bits
+
+zb         i8       512bits
+zw         i16      512bits
+zd         i32      512bits
+zq         i64      512bits
+
+zub         u8       512bits
+zuw         u16      512bits
+zud         u32      512bits
+zuq         u64      512bits
+
+# alternative names...
+zi8         i8       512bits
+zi16        i16      512bits
+zi32        i32      512bits
+zi64        i64      512bits
+
+zu8         u8       512bits
+zu16        u16      512bits
+zu32        u32      512bits
+zu64        u64      512bits
+zu128       u128     512bits
+
+
diff --git a/x86/x86spec/xeddata/database.go b/x86/xeddata/database.go
similarity index 97%
rename from x86/x86spec/xeddata/database.go
rename to x86/xeddata/database.go
index 115e22e..35d86d9 100644
--- a/x86/x86spec/xeddata/database.go
+++ b/x86/xeddata/database.go
@@ -6,6 +6,7 @@
 
 import (
 	"bytes"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -113,6 +114,14 @@
 func NewDatabase(xedPath string) (*Database, error) {
 	var db Database
 
+	stat, err := os.Stat(xedPath)
+	if err != nil {
+		return nil, err
+	}
+	if !stat.IsDir() {
+		return nil, errors.New("xedPath is not directory")
+	}
+
 	states, err := os.Open(filepath.Join(xedPath, "all-state.txt"))
 	if err == nil {
 		err = db.LoadStates(states)
diff --git a/x86/x86spec/xeddata/doc.go b/x86/xeddata/doc.go
similarity index 93%
rename from x86/x86spec/xeddata/doc.go
rename to x86/xeddata/doc.go
index 8022857..bb1a96a 100644
--- a/x86/x86spec/xeddata/doc.go
+++ b/x86/xeddata/doc.go
@@ -33,7 +33,7 @@
 // See example_test.go for complete examples.
 //
 // It is required to build Intel XED before attempting to use
-// it's datafiles, as this package expects "all" versions that
+// its datafiles, as this package expects "all" versions that
 // are a concatenated final versions of datafiles.
 // If "$XED/obj/dgen/" does not contain relevant files,
 // then either this documentation is stale or your XED is not built.
@@ -42,7 +42,7 @@
 //
 // Intel XED https://github.com/intelxed/xed provides all documentation
 // that can be required to understand datafiles.
-// The "$XED/misc/engineering-notes.txt" is particullary useful.
+// The "$XED/misc/engineering-notes.txt" is particularly useful.
 // For convenience, the most important notes are spread across package comments.
 //
 // Tested with XED 088c48a2efa447872945168272bcd7005a7ddd91.
diff --git a/x86/x86spec/xeddata/example_test.go b/x86/xeddata/example_test.go
similarity index 96%
rename from x86/x86spec/xeddata/example_test.go
rename to x86/xeddata/example_test.go
index d9496da..d7b67d9 100644
--- a/x86/x86spec/xeddata/example_test.go
+++ b/x86/xeddata/example_test.go
@@ -9,7 +9,7 @@
 	"log"
 	"strings"
 
-	"golang.org/x/arch/x86/x86spec/xeddata"
+	"golang.org/x/arch/x86/xeddata"
 )
 
 // The "testdata/xedpath" directory contains XED metadata files
@@ -70,7 +70,7 @@
 	// 	[1] REG0=GPRv_R():cw REG1=GPRv_B():r
 }
 
-// This example shows how to use ExpandStates and it's effects.
+// This example shows how to use ExpandStates and its effects.
 func ExampleExpandStates() {
 	const xedPath = "testdata/xedpath"
 
@@ -150,7 +150,7 @@
 
 	inst := objects[0].Insts[0] // Single instruction is enough for this example
 	for i, rawOperand := range strings.Fields(inst.Operands) {
-		operand, err := xeddata.NewOperand(db, strings.Split(rawOperand, ":"))
+		operand, err := xeddata.NewOperand(db, rawOperand)
 		if err != nil {
 			log.Fatalf("parse operand #%d: %+v", i, err)
 		}
diff --git a/x86/x86spec/xeddata/object.go b/x86/xeddata/object.go
similarity index 100%
rename from x86/x86spec/xeddata/object.go
rename to x86/xeddata/object.go
diff --git a/x86/x86spec/xeddata/operand.go b/x86/xeddata/operand.go
similarity index 88%
rename from x86/x86spec/xeddata/operand.go
rename to x86/xeddata/operand.go
index 0d75f58..e934ed7 100644
--- a/x86/x86spec/xeddata/operand.go
+++ b/x86/xeddata/operand.go
@@ -86,28 +86,27 @@
 	"ECOND": VisEcond,
 }
 
-// NewOperand packs operand fields into Operand.
-// Fields are colon (":") separated parts of the OPERANDS column.
-//
-// At least two fixed-position fields are expected:
-//   [0] - name
-//   [1] - r/w action
+// NewOperand decodes operand string.
 //
 // See "$XED/pysrc/opnds.py" to learn about fields format
 // and valid combinations.
 //
 // Requires database with xtypes and widths info.
-func NewOperand(db *Database, fields []string) (Operand, error) {
+func NewOperand(db *Database, s string) (*Operand, error) {
+	if db.widths == nil {
+		return nil, errors.New("Database.widths is nil")
+	}
+
+	fields := strings.Split(s, ":")
+	switch len(fields) {
+	case 0:
+		return nil, errors.New("empty operand fields string")
+	case 1:
+		return &Operand{Name: fields[0]}, nil
+	}
 	var op Operand
 
-	if db.widths == nil {
-		return op, errors.New("Database.widths is nil")
-	}
-	if len(fields) < 2 {
-		return op, errors.New("fields should have at least 2 elements")
-	}
-
-	// First two fields are fixed and mandatory.
+	// First two fields are fixed.
 	op.Name = fields[0]
 	op.Action = fields[1]
 
@@ -127,7 +126,7 @@
 		}
 	}
 
-	return op, nil
+	return &op, nil
 }
 
 // NonterminalName returns true if op.Name consist
diff --git a/x86/xeddata/pattern_set.go b/x86/xeddata/pattern_set.go
new file mode 100644
index 0000000..acc30bd
--- /dev/null
+++ b/x86/xeddata/pattern_set.go
@@ -0,0 +1,95 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xeddata
+
+import (
+	"sort"
+	"strings"
+)
+
+// PatternSet wraps an instruction's PATTERN properties, providing set operations on them.
+type PatternSet map[string]bool
+
+// NewPatternSet decodes pattern string into PatternSet.
+func NewPatternSet(pattern string) PatternSet {
+	pset := make(PatternSet)
+	for _, f := range strings.Fields(pattern) {
+		pset[f] = true
+	}
+	return pset
+}
+
+// PatternAliases is an extendable map of pattern key aliases.
+// It maps human-readable keys to XED properties.
+//
+// Used in PatternSet.Is.
+var PatternAliases = map[string]string{
+	"VEX":     "VEXVALID=1",
+	"EVEX":    "VEXVALID=2",
+	"XOP":     "VEXVALID=3",
+	"MemOnly": "MOD!=3",
+	"RegOnly": "MOD=3",
+}
+
+// String returns a printable representation of the pattern.
+// All properties are listed in sorted order.
+func (pset PatternSet) String() string {
+	var keys []string
+	for k := range pset {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	return strings.Join(keys, " ")
+}
+
+// Is reports whether the set contains key k.
+// Unlike a direct pattern set lookup, it first checks whether
+// PatternAliases[k] is defined and, if so, uses that alias instead of k.
+func (pset PatternSet) Is(k string) bool {
+	if alias := PatternAliases[k]; alias != "" {
+		return pset[alias]
+	}
+	return pset[k]
+}
+
+// Replace substitutes newKey for oldKey: if oldKey is present,
+// it is removed and newKey is inserted in its place.
+func (pset PatternSet) Replace(oldKey, newKey string) {
+	if pset[oldKey] {
+		pset[newKey] = true
+		delete(pset, oldKey)
+	}
+}
+
+// Index returns the index (within keys) of the first matching key.
+// It returns -1 if the set contains none of the given keys.
+func (pset PatternSet) Index(keys ...string) int {
+	for i, k := range keys {
+		if pset[k] {
+			return i
+		}
+	}
+	return -1
+}
+
+// Match is like MatchOrDefault("", keyval...).
+func (pset PatternSet) Match(keyval ...string) string {
+	return pset.MatchOrDefault("", keyval...)
+}
+
+// MatchOrDefault returns the value associated with the first matching key.
+// It returns defaultValue if no match is found.
+//
+// Keyval structure can be described as {"k1", "v1", ..., "kN", "vN"}.
+func (pset PatternSet) MatchOrDefault(defaultValue string, keyval ...string) string {
+	for i := 0; i < len(keyval); i += 2 {
+		key := keyval[i+0]
+		val := keyval[i+1]
+		if pset[key] {
+			return val
+		}
+	}
+	return defaultValue
+}
diff --git a/x86/x86spec/xeddata/reader.go b/x86/xeddata/reader.go
similarity index 100%
rename from x86/x86spec/xeddata/reader.go
rename to x86/xeddata/reader.go
diff --git a/x86/x86spec/xeddata/testdata/xed_objects.txt b/x86/xeddata/testdata/xed_objects.txt
similarity index 100%
rename from x86/x86spec/xeddata/testdata/xed_objects.txt
rename to x86/xeddata/testdata/xed_objects.txt
diff --git a/x86/x86spec/xeddata/testdata/xedpath/all-element-types.txt b/x86/xeddata/testdata/xedpath/all-element-types.txt
similarity index 100%
rename from x86/x86spec/xeddata/testdata/xedpath/all-element-types.txt
rename to x86/xeddata/testdata/xedpath/all-element-types.txt
diff --git a/x86/x86spec/xeddata/testdata/xedpath/all-state.txt b/x86/xeddata/testdata/xedpath/all-state.txt
similarity index 100%
rename from x86/x86spec/xeddata/testdata/xedpath/all-state.txt
rename to x86/xeddata/testdata/xedpath/all-state.txt
diff --git a/x86/x86spec/xeddata/testdata/xedpath/all-widths.txt b/x86/xeddata/testdata/xedpath/all-widths.txt
similarity index 100%
rename from x86/x86spec/xeddata/testdata/xedpath/all-widths.txt
rename to x86/xeddata/testdata/xedpath/all-widths.txt
diff --git a/x86/xeddata/xeddata.go b/x86/xeddata/xeddata.go
new file mode 100644
index 0000000..7cc7a08
--- /dev/null
+++ b/x86/xeddata/xeddata.go
@@ -0,0 +1,32 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xeddata
+
+import (
+	"io"
+	"os"
+	"path/filepath"
+)
+
+// WalkInsts calls visit function for each XED instruction found at $xedPath/all-dec-instructions.txt.
+func WalkInsts(xedPath string, visit func(*Inst)) error {
+	f, err := os.Open(filepath.Join(xedPath, "all-dec-instructions.txt"))
+	if err != nil {
+		return err
+	}
+	r := NewReader(f)
+	for {
+		o, err := r.Read()
+		if err == io.EOF {
+			return nil
+		}
+		if err != nil {
+			return err
+		}
+		for _, inst := range o.Insts {
+			visit(inst)
+		}
+	}
+}
diff --git a/x86/x86spec/xeddata/xeddata_test.go b/x86/xeddata/xeddata_test.go
similarity index 98%
rename from x86/x86spec/xeddata/xeddata_test.go
rename to x86/xeddata/xeddata_test.go
index b3d93fd..ad0de1e 100644
--- a/x86/x86spec/xeddata/xeddata_test.go
+++ b/x86/xeddata/xeddata_test.go
@@ -340,11 +340,11 @@
 
 	db := newTestDatabase(t)
 	for _, test := range tests {
-		op, err := NewOperand(db, strings.Split(test.input, ":"))
+		op, err := NewOperand(db, test.input)
 		if err != nil {
 			t.Fatal(err)
 		}
-		if !reflect.DeepEqual(op, test.op) {
+		if !reflect.DeepEqual(*op, test.op) {
 			t.Errorf("parse(`%s`): output mismatch\nhave: %#v\nwant: %#v",
 				test.input, op, test.op,
 			)