blob: 8655880b38981be0eab928dc543c9d88dc9a078c [file] [log] [blame]
// Code generated from gen/ARM64.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go
package ssa
import "cmd/compile/internal/types"
// rewriteValueARM64 dispatches v to the rewrite logic for its opcode and
// reports whether any rewrite was applied. Two shapes of case appear below:
// ops with non-trivial rewrite rules delegate to a dedicated helper
// (rewriteValueARM64_Op*), while generic ops that lower 1:1 to a single
// ARM64 op are handled inline by retagging v.Op and returning true.
func rewriteValueARM64(v *Value) bool {
switch v.Op {
// ARM64-specific ops: each dispatches to its dedicated rewrite helper.
case OpARM64ADCSflags:
return rewriteValueARM64_OpARM64ADCSflags(v)
case OpARM64ADD:
return rewriteValueARM64_OpARM64ADD(v)
case OpARM64ADDconst:
return rewriteValueARM64_OpARM64ADDconst(v)
case OpARM64ADDshiftLL:
return rewriteValueARM64_OpARM64ADDshiftLL(v)
case OpARM64ADDshiftRA:
return rewriteValueARM64_OpARM64ADDshiftRA(v)
case OpARM64ADDshiftRL:
return rewriteValueARM64_OpARM64ADDshiftRL(v)
case OpARM64AND:
return rewriteValueARM64_OpARM64AND(v)
case OpARM64ANDconst:
return rewriteValueARM64_OpARM64ANDconst(v)
case OpARM64ANDshiftLL:
return rewriteValueARM64_OpARM64ANDshiftLL(v)
case OpARM64ANDshiftRA:
return rewriteValueARM64_OpARM64ANDshiftRA(v)
case OpARM64ANDshiftRL:
return rewriteValueARM64_OpARM64ANDshiftRL(v)
case OpARM64BIC:
return rewriteValueARM64_OpARM64BIC(v)
case OpARM64BICshiftLL:
return rewriteValueARM64_OpARM64BICshiftLL(v)
case OpARM64BICshiftRA:
return rewriteValueARM64_OpARM64BICshiftRA(v)
case OpARM64BICshiftRL:
return rewriteValueARM64_OpARM64BICshiftRL(v)
case OpARM64CMN:
return rewriteValueARM64_OpARM64CMN(v)
case OpARM64CMNW:
return rewriteValueARM64_OpARM64CMNW(v)
case OpARM64CMNWconst:
return rewriteValueARM64_OpARM64CMNWconst(v)
case OpARM64CMNconst:
return rewriteValueARM64_OpARM64CMNconst(v)
case OpARM64CMNshiftLL:
return rewriteValueARM64_OpARM64CMNshiftLL(v)
case OpARM64CMNshiftRA:
return rewriteValueARM64_OpARM64CMNshiftRA(v)
case OpARM64CMNshiftRL:
return rewriteValueARM64_OpARM64CMNshiftRL(v)
case OpARM64CMP:
return rewriteValueARM64_OpARM64CMP(v)
case OpARM64CMPW:
return rewriteValueARM64_OpARM64CMPW(v)
case OpARM64CMPWconst:
return rewriteValueARM64_OpARM64CMPWconst(v)
case OpARM64CMPconst:
return rewriteValueARM64_OpARM64CMPconst(v)
case OpARM64CMPshiftLL:
return rewriteValueARM64_OpARM64CMPshiftLL(v)
case OpARM64CMPshiftRA:
return rewriteValueARM64_OpARM64CMPshiftRA(v)
case OpARM64CMPshiftRL:
return rewriteValueARM64_OpARM64CMPshiftRL(v)
case OpARM64CSEL:
return rewriteValueARM64_OpARM64CSEL(v)
case OpARM64CSEL0:
return rewriteValueARM64_OpARM64CSEL0(v)
case OpARM64DIV:
return rewriteValueARM64_OpARM64DIV(v)
case OpARM64DIVW:
return rewriteValueARM64_OpARM64DIVW(v)
case OpARM64EON:
return rewriteValueARM64_OpARM64EON(v)
case OpARM64EONshiftLL:
return rewriteValueARM64_OpARM64EONshiftLL(v)
case OpARM64EONshiftRA:
return rewriteValueARM64_OpARM64EONshiftRA(v)
case OpARM64EONshiftRL:
return rewriteValueARM64_OpARM64EONshiftRL(v)
case OpARM64Equal:
return rewriteValueARM64_OpARM64Equal(v)
case OpARM64FADDD:
return rewriteValueARM64_OpARM64FADDD(v)
case OpARM64FADDS:
return rewriteValueARM64_OpARM64FADDS(v)
case OpARM64FCMPD:
return rewriteValueARM64_OpARM64FCMPD(v)
case OpARM64FCMPS:
return rewriteValueARM64_OpARM64FCMPS(v)
case OpARM64FMOVDfpgp:
return rewriteValueARM64_OpARM64FMOVDfpgp(v)
case OpARM64FMOVDgpfp:
return rewriteValueARM64_OpARM64FMOVDgpfp(v)
case OpARM64FMOVDload:
return rewriteValueARM64_OpARM64FMOVDload(v)
case OpARM64FMOVDloadidx:
return rewriteValueARM64_OpARM64FMOVDloadidx(v)
case OpARM64FMOVDstore:
return rewriteValueARM64_OpARM64FMOVDstore(v)
case OpARM64FMOVDstoreidx:
return rewriteValueARM64_OpARM64FMOVDstoreidx(v)
case OpARM64FMOVSload:
return rewriteValueARM64_OpARM64FMOVSload(v)
case OpARM64FMOVSloadidx:
return rewriteValueARM64_OpARM64FMOVSloadidx(v)
case OpARM64FMOVSstore:
return rewriteValueARM64_OpARM64FMOVSstore(v)
case OpARM64FMOVSstoreidx:
return rewriteValueARM64_OpARM64FMOVSstoreidx(v)
case OpARM64FMULD:
return rewriteValueARM64_OpARM64FMULD(v)
case OpARM64FMULS:
return rewriteValueARM64_OpARM64FMULS(v)
case OpARM64FNEGD:
return rewriteValueARM64_OpARM64FNEGD(v)
case OpARM64FNEGS:
return rewriteValueARM64_OpARM64FNEGS(v)
case OpARM64FNMULD:
return rewriteValueARM64_OpARM64FNMULD(v)
case OpARM64FNMULS:
return rewriteValueARM64_OpARM64FNMULS(v)
case OpARM64FSUBD:
return rewriteValueARM64_OpARM64FSUBD(v)
case OpARM64FSUBS:
return rewriteValueARM64_OpARM64FSUBS(v)
case OpARM64GreaterEqual:
return rewriteValueARM64_OpARM64GreaterEqual(v)
case OpARM64GreaterEqualF:
return rewriteValueARM64_OpARM64GreaterEqualF(v)
case OpARM64GreaterEqualU:
return rewriteValueARM64_OpARM64GreaterEqualU(v)
case OpARM64GreaterThan:
return rewriteValueARM64_OpARM64GreaterThan(v)
case OpARM64GreaterThanF:
return rewriteValueARM64_OpARM64GreaterThanF(v)
case OpARM64GreaterThanU:
return rewriteValueARM64_OpARM64GreaterThanU(v)
case OpARM64LessEqual:
return rewriteValueARM64_OpARM64LessEqual(v)
case OpARM64LessEqualF:
return rewriteValueARM64_OpARM64LessEqualF(v)
case OpARM64LessEqualU:
return rewriteValueARM64_OpARM64LessEqualU(v)
case OpARM64LessThan:
return rewriteValueARM64_OpARM64LessThan(v)
case OpARM64LessThanF:
return rewriteValueARM64_OpARM64LessThanF(v)
case OpARM64LessThanU:
return rewriteValueARM64_OpARM64LessThanU(v)
case OpARM64MADD:
return rewriteValueARM64_OpARM64MADD(v)
case OpARM64MADDW:
return rewriteValueARM64_OpARM64MADDW(v)
case OpARM64MNEG:
return rewriteValueARM64_OpARM64MNEG(v)
case OpARM64MNEGW:
return rewriteValueARM64_OpARM64MNEGW(v)
case OpARM64MOD:
return rewriteValueARM64_OpARM64MOD(v)
case OpARM64MODW:
return rewriteValueARM64_OpARM64MODW(v)
case OpARM64MOVBUload:
return rewriteValueARM64_OpARM64MOVBUload(v)
case OpARM64MOVBUloadidx:
return rewriteValueARM64_OpARM64MOVBUloadidx(v)
case OpARM64MOVBUreg:
return rewriteValueARM64_OpARM64MOVBUreg(v)
case OpARM64MOVBload:
return rewriteValueARM64_OpARM64MOVBload(v)
case OpARM64MOVBloadidx:
return rewriteValueARM64_OpARM64MOVBloadidx(v)
case OpARM64MOVBreg:
return rewriteValueARM64_OpARM64MOVBreg(v)
case OpARM64MOVBstore:
return rewriteValueARM64_OpARM64MOVBstore(v)
case OpARM64MOVBstoreidx:
return rewriteValueARM64_OpARM64MOVBstoreidx(v)
case OpARM64MOVBstorezero:
return rewriteValueARM64_OpARM64MOVBstorezero(v)
case OpARM64MOVBstorezeroidx:
return rewriteValueARM64_OpARM64MOVBstorezeroidx(v)
case OpARM64MOVDload:
return rewriteValueARM64_OpARM64MOVDload(v)
case OpARM64MOVDloadidx:
return rewriteValueARM64_OpARM64MOVDloadidx(v)
case OpARM64MOVDloadidx8:
return rewriteValueARM64_OpARM64MOVDloadidx8(v)
case OpARM64MOVDreg:
return rewriteValueARM64_OpARM64MOVDreg(v)
case OpARM64MOVDstore:
return rewriteValueARM64_OpARM64MOVDstore(v)
case OpARM64MOVDstoreidx:
return rewriteValueARM64_OpARM64MOVDstoreidx(v)
case OpARM64MOVDstoreidx8:
return rewriteValueARM64_OpARM64MOVDstoreidx8(v)
case OpARM64MOVDstorezero:
return rewriteValueARM64_OpARM64MOVDstorezero(v)
case OpARM64MOVDstorezeroidx:
return rewriteValueARM64_OpARM64MOVDstorezeroidx(v)
case OpARM64MOVDstorezeroidx8:
return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v)
case OpARM64MOVHUload:
return rewriteValueARM64_OpARM64MOVHUload(v)
case OpARM64MOVHUloadidx:
return rewriteValueARM64_OpARM64MOVHUloadidx(v)
case OpARM64MOVHUloadidx2:
return rewriteValueARM64_OpARM64MOVHUloadidx2(v)
case OpARM64MOVHUreg:
return rewriteValueARM64_OpARM64MOVHUreg(v)
case OpARM64MOVHload:
return rewriteValueARM64_OpARM64MOVHload(v)
case OpARM64MOVHloadidx:
return rewriteValueARM64_OpARM64MOVHloadidx(v)
case OpARM64MOVHloadidx2:
return rewriteValueARM64_OpARM64MOVHloadidx2(v)
case OpARM64MOVHreg:
return rewriteValueARM64_OpARM64MOVHreg(v)
case OpARM64MOVHstore:
return rewriteValueARM64_OpARM64MOVHstore(v)
case OpARM64MOVHstoreidx:
return rewriteValueARM64_OpARM64MOVHstoreidx(v)
case OpARM64MOVHstoreidx2:
return rewriteValueARM64_OpARM64MOVHstoreidx2(v)
case OpARM64MOVHstorezero:
return rewriteValueARM64_OpARM64MOVHstorezero(v)
case OpARM64MOVHstorezeroidx:
return rewriteValueARM64_OpARM64MOVHstorezeroidx(v)
case OpARM64MOVHstorezeroidx2:
return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v)
case OpARM64MOVQstorezero:
return rewriteValueARM64_OpARM64MOVQstorezero(v)
case OpARM64MOVWUload:
return rewriteValueARM64_OpARM64MOVWUload(v)
case OpARM64MOVWUloadidx:
return rewriteValueARM64_OpARM64MOVWUloadidx(v)
case OpARM64MOVWUloadidx4:
return rewriteValueARM64_OpARM64MOVWUloadidx4(v)
case OpARM64MOVWUreg:
return rewriteValueARM64_OpARM64MOVWUreg(v)
case OpARM64MOVWload:
return rewriteValueARM64_OpARM64MOVWload(v)
case OpARM64MOVWloadidx:
return rewriteValueARM64_OpARM64MOVWloadidx(v)
case OpARM64MOVWloadidx4:
return rewriteValueARM64_OpARM64MOVWloadidx4(v)
case OpARM64MOVWreg:
return rewriteValueARM64_OpARM64MOVWreg(v)
case OpARM64MOVWstore:
return rewriteValueARM64_OpARM64MOVWstore(v)
case OpARM64MOVWstoreidx:
return rewriteValueARM64_OpARM64MOVWstoreidx(v)
case OpARM64MOVWstoreidx4:
return rewriteValueARM64_OpARM64MOVWstoreidx4(v)
case OpARM64MOVWstorezero:
return rewriteValueARM64_OpARM64MOVWstorezero(v)
case OpARM64MOVWstorezeroidx:
return rewriteValueARM64_OpARM64MOVWstorezeroidx(v)
case OpARM64MOVWstorezeroidx4:
return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v)
case OpARM64MSUB:
return rewriteValueARM64_OpARM64MSUB(v)
case OpARM64MSUBW:
return rewriteValueARM64_OpARM64MSUBW(v)
case OpARM64MUL:
return rewriteValueARM64_OpARM64MUL(v)
case OpARM64MULW:
return rewriteValueARM64_OpARM64MULW(v)
case OpARM64MVN:
return rewriteValueARM64_OpARM64MVN(v)
case OpARM64MVNshiftLL:
return rewriteValueARM64_OpARM64MVNshiftLL(v)
case OpARM64MVNshiftRA:
return rewriteValueARM64_OpARM64MVNshiftRA(v)
case OpARM64MVNshiftRL:
return rewriteValueARM64_OpARM64MVNshiftRL(v)
case OpARM64NEG:
return rewriteValueARM64_OpARM64NEG(v)
case OpARM64NEGshiftLL:
return rewriteValueARM64_OpARM64NEGshiftLL(v)
case OpARM64NEGshiftRA:
return rewriteValueARM64_OpARM64NEGshiftRA(v)
case OpARM64NEGshiftRL:
return rewriteValueARM64_OpARM64NEGshiftRL(v)
case OpARM64NotEqual:
return rewriteValueARM64_OpARM64NotEqual(v)
case OpARM64OR:
return rewriteValueARM64_OpARM64OR(v)
case OpARM64ORN:
return rewriteValueARM64_OpARM64ORN(v)
case OpARM64ORNshiftLL:
return rewriteValueARM64_OpARM64ORNshiftLL(v)
case OpARM64ORNshiftRA:
return rewriteValueARM64_OpARM64ORNshiftRA(v)
case OpARM64ORNshiftRL:
return rewriteValueARM64_OpARM64ORNshiftRL(v)
case OpARM64ORconst:
return rewriteValueARM64_OpARM64ORconst(v)
case OpARM64ORshiftLL:
return rewriteValueARM64_OpARM64ORshiftLL(v)
case OpARM64ORshiftRA:
return rewriteValueARM64_OpARM64ORshiftRA(v)
case OpARM64ORshiftRL:
return rewriteValueARM64_OpARM64ORshiftRL(v)
case OpARM64RORWconst:
return rewriteValueARM64_OpARM64RORWconst(v)
case OpARM64RORconst:
return rewriteValueARM64_OpARM64RORconst(v)
case OpARM64SBCSflags:
return rewriteValueARM64_OpARM64SBCSflags(v)
case OpARM64SLL:
return rewriteValueARM64_OpARM64SLL(v)
case OpARM64SLLconst:
return rewriteValueARM64_OpARM64SLLconst(v)
case OpARM64SRA:
return rewriteValueARM64_OpARM64SRA(v)
case OpARM64SRAconst:
return rewriteValueARM64_OpARM64SRAconst(v)
case OpARM64SRL:
return rewriteValueARM64_OpARM64SRL(v)
case OpARM64SRLconst:
return rewriteValueARM64_OpARM64SRLconst(v)
case OpARM64STP:
return rewriteValueARM64_OpARM64STP(v)
case OpARM64SUB:
return rewriteValueARM64_OpARM64SUB(v)
case OpARM64SUBconst:
return rewriteValueARM64_OpARM64SUBconst(v)
case OpARM64SUBshiftLL:
return rewriteValueARM64_OpARM64SUBshiftLL(v)
case OpARM64SUBshiftRA:
return rewriteValueARM64_OpARM64SUBshiftRA(v)
case OpARM64SUBshiftRL:
return rewriteValueARM64_OpARM64SUBshiftRL(v)
case OpARM64TST:
return rewriteValueARM64_OpARM64TST(v)
case OpARM64TSTW:
return rewriteValueARM64_OpARM64TSTW(v)
case OpARM64TSTWconst:
return rewriteValueARM64_OpARM64TSTWconst(v)
case OpARM64TSTconst:
return rewriteValueARM64_OpARM64TSTconst(v)
case OpARM64TSTshiftLL:
return rewriteValueARM64_OpARM64TSTshiftLL(v)
case OpARM64TSTshiftRA:
return rewriteValueARM64_OpARM64TSTshiftRA(v)
case OpARM64TSTshiftRL:
return rewriteValueARM64_OpARM64TSTshiftRL(v)
case OpARM64UBFIZ:
return rewriteValueARM64_OpARM64UBFIZ(v)
case OpARM64UBFX:
return rewriteValueARM64_OpARM64UBFX(v)
case OpARM64UDIV:
return rewriteValueARM64_OpARM64UDIV(v)
case OpARM64UDIVW:
return rewriteValueARM64_OpARM64UDIVW(v)
case OpARM64UMOD:
return rewriteValueARM64_OpARM64UMOD(v)
case OpARM64UMODW:
return rewriteValueARM64_OpARM64UMODW(v)
case OpARM64XOR:
return rewriteValueARM64_OpARM64XOR(v)
case OpARM64XORconst:
return rewriteValueARM64_OpARM64XORconst(v)
case OpARM64XORshiftLL:
return rewriteValueARM64_OpARM64XORshiftLL(v)
case OpARM64XORshiftRA:
return rewriteValueARM64_OpARM64XORshiftRA(v)
case OpARM64XORshiftRL:
return rewriteValueARM64_OpARM64XORshiftRL(v)
// Generic (machine-independent) ops: ops that lower 1:1 to a single
// ARM64 op are retagged in place (keeping v's args/aux); the rest
// dispatch to a rewrite helper.
case OpAbs:
v.Op = OpARM64FABSD
return true
case OpAdd16:
v.Op = OpARM64ADD
return true
case OpAdd32:
v.Op = OpARM64ADD
return true
case OpAdd32F:
v.Op = OpARM64FADDS
return true
case OpAdd64:
v.Op = OpARM64ADD
return true
case OpAdd64F:
v.Op = OpARM64FADDD
return true
case OpAdd8:
v.Op = OpARM64ADD
return true
case OpAddPtr:
v.Op = OpARM64ADD
return true
case OpAddr:
return rewriteValueARM64_OpAddr(v)
case OpAnd16:
v.Op = OpARM64AND
return true
case OpAnd32:
v.Op = OpARM64AND
return true
case OpAnd64:
v.Op = OpARM64AND
return true
case OpAnd8:
v.Op = OpARM64AND
return true
case OpAndB:
v.Op = OpARM64AND
return true
case OpAtomicAdd32:
v.Op = OpARM64LoweredAtomicAdd32
return true
case OpAtomicAdd32Variant:
v.Op = OpARM64LoweredAtomicAdd32Variant
return true
case OpAtomicAdd64:
v.Op = OpARM64LoweredAtomicAdd64
return true
case OpAtomicAdd64Variant:
v.Op = OpARM64LoweredAtomicAdd64Variant
return true
case OpAtomicAnd8:
return rewriteValueARM64_OpAtomicAnd8(v)
case OpAtomicCompareAndSwap32:
v.Op = OpARM64LoweredAtomicCas32
return true
case OpAtomicCompareAndSwap64:
v.Op = OpARM64LoweredAtomicCas64
return true
case OpAtomicExchange32:
v.Op = OpARM64LoweredAtomicExchange32
return true
case OpAtomicExchange64:
v.Op = OpARM64LoweredAtomicExchange64
return true
case OpAtomicLoad32:
v.Op = OpARM64LDARW
return true
case OpAtomicLoad64:
v.Op = OpARM64LDAR
return true
case OpAtomicLoad8:
v.Op = OpARM64LDARB
return true
case OpAtomicLoadPtr:
v.Op = OpARM64LDAR
return true
case OpAtomicOr8:
return rewriteValueARM64_OpAtomicOr8(v)
case OpAtomicStore32:
v.Op = OpARM64STLRW
return true
case OpAtomicStore64:
v.Op = OpARM64STLR
return true
case OpAtomicStore8:
v.Op = OpARM64STLRB
return true
case OpAtomicStorePtrNoWB:
v.Op = OpARM64STLR
return true
case OpAvg64u:
return rewriteValueARM64_OpAvg64u(v)
case OpBitLen32:
return rewriteValueARM64_OpBitLen32(v)
case OpBitLen64:
return rewriteValueARM64_OpBitLen64(v)
case OpBitRev16:
return rewriteValueARM64_OpBitRev16(v)
case OpBitRev32:
v.Op = OpARM64RBITW
return true
case OpBitRev64:
v.Op = OpARM64RBIT
return true
case OpBitRev8:
return rewriteValueARM64_OpBitRev8(v)
case OpBswap32:
v.Op = OpARM64REVW
return true
case OpBswap64:
v.Op = OpARM64REV
return true
case OpCeil:
v.Op = OpARM64FRINTPD
return true
case OpClosureCall:
v.Op = OpARM64CALLclosure
return true
case OpCom16:
v.Op = OpARM64MVN
return true
case OpCom32:
v.Op = OpARM64MVN
return true
case OpCom64:
v.Op = OpARM64MVN
return true
case OpCom8:
v.Op = OpARM64MVN
return true
case OpCondSelect:
return rewriteValueARM64_OpCondSelect(v)
case OpConst16:
return rewriteValueARM64_OpConst16(v)
case OpConst32:
return rewriteValueARM64_OpConst32(v)
case OpConst32F:
return rewriteValueARM64_OpConst32F(v)
case OpConst64:
return rewriteValueARM64_OpConst64(v)
case OpConst64F:
return rewriteValueARM64_OpConst64F(v)
case OpConst8:
return rewriteValueARM64_OpConst8(v)
case OpConstBool:
return rewriteValueARM64_OpConstBool(v)
case OpConstNil:
return rewriteValueARM64_OpConstNil(v)
case OpCtz16:
return rewriteValueARM64_OpCtz16(v)
case OpCtz16NonZero:
v.Op = OpCtz32
return true
case OpCtz32:
return rewriteValueARM64_OpCtz32(v)
case OpCtz32NonZero:
v.Op = OpCtz32
return true
case OpCtz64:
return rewriteValueARM64_OpCtz64(v)
case OpCtz64NonZero:
v.Op = OpCtz64
return true
case OpCtz8:
return rewriteValueARM64_OpCtz8(v)
case OpCtz8NonZero:
v.Op = OpCtz32
return true
case OpCvt32Fto32:
v.Op = OpARM64FCVTZSSW
return true
case OpCvt32Fto32U:
v.Op = OpARM64FCVTZUSW
return true
case OpCvt32Fto64:
v.Op = OpARM64FCVTZSS
return true
case OpCvt32Fto64F:
v.Op = OpARM64FCVTSD
return true
case OpCvt32Fto64U:
v.Op = OpARM64FCVTZUS
return true
case OpCvt32Uto32F:
v.Op = OpARM64UCVTFWS
return true
case OpCvt32Uto64F:
v.Op = OpARM64UCVTFWD
return true
case OpCvt32to32F:
v.Op = OpARM64SCVTFWS
return true
case OpCvt32to64F:
v.Op = OpARM64SCVTFWD
return true
case OpCvt64Fto32:
v.Op = OpARM64FCVTZSDW
return true
case OpCvt64Fto32F:
v.Op = OpARM64FCVTDS
return true
case OpCvt64Fto32U:
v.Op = OpARM64FCVTZUDW
return true
case OpCvt64Fto64:
v.Op = OpARM64FCVTZSD
return true
case OpCvt64Fto64U:
v.Op = OpARM64FCVTZUD
return true
case OpCvt64Uto32F:
v.Op = OpARM64UCVTFS
return true
case OpCvt64Uto64F:
v.Op = OpARM64UCVTFD
return true
case OpCvt64to32F:
v.Op = OpARM64SCVTFS
return true
case OpCvt64to64F:
v.Op = OpARM64SCVTFD
return true
case OpCvtBoolToUint8:
v.Op = OpCopy
return true
case OpDiv16:
return rewriteValueARM64_OpDiv16(v)
case OpDiv16u:
return rewriteValueARM64_OpDiv16u(v)
case OpDiv32:
return rewriteValueARM64_OpDiv32(v)
case OpDiv32F:
v.Op = OpARM64FDIVS
return true
case OpDiv32u:
v.Op = OpARM64UDIVW
return true
case OpDiv64:
return rewriteValueARM64_OpDiv64(v)
case OpDiv64F:
v.Op = OpARM64FDIVD
return true
case OpDiv64u:
v.Op = OpARM64UDIV
return true
case OpDiv8:
return rewriteValueARM64_OpDiv8(v)
case OpDiv8u:
return rewriteValueARM64_OpDiv8u(v)
case OpEq16:
return rewriteValueARM64_OpEq16(v)
case OpEq32:
return rewriteValueARM64_OpEq32(v)
case OpEq32F:
return rewriteValueARM64_OpEq32F(v)
case OpEq64:
return rewriteValueARM64_OpEq64(v)
case OpEq64F:
return rewriteValueARM64_OpEq64F(v)
case OpEq8:
return rewriteValueARM64_OpEq8(v)
case OpEqB:
return rewriteValueARM64_OpEqB(v)
case OpEqPtr:
return rewriteValueARM64_OpEqPtr(v)
case OpFMA:
return rewriteValueARM64_OpFMA(v)
case OpFloor:
v.Op = OpARM64FRINTMD
return true
case OpGetCallerPC:
v.Op = OpARM64LoweredGetCallerPC
return true
case OpGetCallerSP:
v.Op = OpARM64LoweredGetCallerSP
return true
case OpGetClosurePtr:
v.Op = OpARM64LoweredGetClosurePtr
return true
case OpHmul32:
return rewriteValueARM64_OpHmul32(v)
case OpHmul32u:
return rewriteValueARM64_OpHmul32u(v)
case OpHmul64:
v.Op = OpARM64MULH
return true
case OpHmul64u:
v.Op = OpARM64UMULH
return true
case OpInterCall:
v.Op = OpARM64CALLinter
return true
case OpIsInBounds:
return rewriteValueARM64_OpIsInBounds(v)
case OpIsNonNil:
return rewriteValueARM64_OpIsNonNil(v)
case OpIsSliceInBounds:
return rewriteValueARM64_OpIsSliceInBounds(v)
case OpLeq16:
return rewriteValueARM64_OpLeq16(v)
case OpLeq16U:
return rewriteValueARM64_OpLeq16U(v)
case OpLeq32:
return rewriteValueARM64_OpLeq32(v)
case OpLeq32F:
return rewriteValueARM64_OpLeq32F(v)
case OpLeq32U:
return rewriteValueARM64_OpLeq32U(v)
case OpLeq64:
return rewriteValueARM64_OpLeq64(v)
case OpLeq64F:
return rewriteValueARM64_OpLeq64F(v)
case OpLeq64U:
return rewriteValueARM64_OpLeq64U(v)
case OpLeq8:
return rewriteValueARM64_OpLeq8(v)
case OpLeq8U:
return rewriteValueARM64_OpLeq8U(v)
case OpLess16:
return rewriteValueARM64_OpLess16(v)
case OpLess16U:
return rewriteValueARM64_OpLess16U(v)
case OpLess32:
return rewriteValueARM64_OpLess32(v)
case OpLess32F:
return rewriteValueARM64_OpLess32F(v)
case OpLess32U:
return rewriteValueARM64_OpLess32U(v)
case OpLess64:
return rewriteValueARM64_OpLess64(v)
case OpLess64F:
return rewriteValueARM64_OpLess64F(v)
case OpLess64U:
return rewriteValueARM64_OpLess64U(v)
case OpLess8:
return rewriteValueARM64_OpLess8(v)
case OpLess8U:
return rewriteValueARM64_OpLess8U(v)
case OpLoad:
return rewriteValueARM64_OpLoad(v)
case OpLocalAddr:
return rewriteValueARM64_OpLocalAddr(v)
case OpLsh16x16:
return rewriteValueARM64_OpLsh16x16(v)
case OpLsh16x32:
return rewriteValueARM64_OpLsh16x32(v)
case OpLsh16x64:
return rewriteValueARM64_OpLsh16x64(v)
case OpLsh16x8:
return rewriteValueARM64_OpLsh16x8(v)
case OpLsh32x16:
return rewriteValueARM64_OpLsh32x16(v)
case OpLsh32x32:
return rewriteValueARM64_OpLsh32x32(v)
case OpLsh32x64:
return rewriteValueARM64_OpLsh32x64(v)
case OpLsh32x8:
return rewriteValueARM64_OpLsh32x8(v)
case OpLsh64x16:
return rewriteValueARM64_OpLsh64x16(v)
case OpLsh64x32:
return rewriteValueARM64_OpLsh64x32(v)
case OpLsh64x64:
return rewriteValueARM64_OpLsh64x64(v)
case OpLsh64x8:
return rewriteValueARM64_OpLsh64x8(v)
case OpLsh8x16:
return rewriteValueARM64_OpLsh8x16(v)
case OpLsh8x32:
return rewriteValueARM64_OpLsh8x32(v)
case OpLsh8x64:
return rewriteValueARM64_OpLsh8x64(v)
case OpLsh8x8:
return rewriteValueARM64_OpLsh8x8(v)
case OpMod16:
return rewriteValueARM64_OpMod16(v)
case OpMod16u:
return rewriteValueARM64_OpMod16u(v)
case OpMod32:
return rewriteValueARM64_OpMod32(v)
case OpMod32u:
v.Op = OpARM64UMODW
return true
case OpMod64:
return rewriteValueARM64_OpMod64(v)
case OpMod64u:
v.Op = OpARM64UMOD
return true
case OpMod8:
return rewriteValueARM64_OpMod8(v)
case OpMod8u:
return rewriteValueARM64_OpMod8u(v)
case OpMove:
return rewriteValueARM64_OpMove(v)
case OpMul16:
v.Op = OpARM64MULW
return true
case OpMul32:
v.Op = OpARM64MULW
return true
case OpMul32F:
v.Op = OpARM64FMULS
return true
case OpMul64:
v.Op = OpARM64MUL
return true
case OpMul64F:
v.Op = OpARM64FMULD
return true
case OpMul64uhilo:
v.Op = OpARM64LoweredMuluhilo
return true
case OpMul8:
v.Op = OpARM64MULW
return true
case OpNeg16:
v.Op = OpARM64NEG
return true
case OpNeg32:
v.Op = OpARM64NEG
return true
case OpNeg32F:
v.Op = OpARM64FNEGS
return true
case OpNeg64:
v.Op = OpARM64NEG
return true
case OpNeg64F:
v.Op = OpARM64FNEGD
return true
case OpNeg8:
v.Op = OpARM64NEG
return true
case OpNeq16:
return rewriteValueARM64_OpNeq16(v)
case OpNeq32:
return rewriteValueARM64_OpNeq32(v)
case OpNeq32F:
return rewriteValueARM64_OpNeq32F(v)
case OpNeq64:
return rewriteValueARM64_OpNeq64(v)
case OpNeq64F:
return rewriteValueARM64_OpNeq64F(v)
case OpNeq8:
return rewriteValueARM64_OpNeq8(v)
case OpNeqB:
v.Op = OpARM64XOR
return true
case OpNeqPtr:
return rewriteValueARM64_OpNeqPtr(v)
case OpNilCheck:
v.Op = OpARM64LoweredNilCheck
return true
case OpNot:
return rewriteValueARM64_OpNot(v)
case OpOffPtr:
return rewriteValueARM64_OpOffPtr(v)
case OpOr16:
v.Op = OpARM64OR
return true
case OpOr32:
v.Op = OpARM64OR
return true
case OpOr64:
v.Op = OpARM64OR
return true
case OpOr8:
v.Op = OpARM64OR
return true
case OpOrB:
v.Op = OpARM64OR
return true
case OpPanicBounds:
return rewriteValueARM64_OpPanicBounds(v)
case OpPopCount16:
return rewriteValueARM64_OpPopCount16(v)
case OpPopCount32:
return rewriteValueARM64_OpPopCount32(v)
case OpPopCount64:
return rewriteValueARM64_OpPopCount64(v)
case OpRotateLeft16:
return rewriteValueARM64_OpRotateLeft16(v)
case OpRotateLeft32:
return rewriteValueARM64_OpRotateLeft32(v)
case OpRotateLeft64:
return rewriteValueARM64_OpRotateLeft64(v)
case OpRotateLeft8:
return rewriteValueARM64_OpRotateLeft8(v)
case OpRound:
v.Op = OpARM64FRINTAD
return true
case OpRound32F:
v.Op = OpARM64LoweredRound32F
return true
case OpRound64F:
v.Op = OpARM64LoweredRound64F
return true
case OpRoundToEven:
v.Op = OpARM64FRINTND
return true
case OpRsh16Ux16:
return rewriteValueARM64_OpRsh16Ux16(v)
case OpRsh16Ux32:
return rewriteValueARM64_OpRsh16Ux32(v)
case OpRsh16Ux64:
return rewriteValueARM64_OpRsh16Ux64(v)
case OpRsh16Ux8:
return rewriteValueARM64_OpRsh16Ux8(v)
case OpRsh16x16:
return rewriteValueARM64_OpRsh16x16(v)
case OpRsh16x32:
return rewriteValueARM64_OpRsh16x32(v)
case OpRsh16x64:
return rewriteValueARM64_OpRsh16x64(v)
case OpRsh16x8:
return rewriteValueARM64_OpRsh16x8(v)
case OpRsh32Ux16:
return rewriteValueARM64_OpRsh32Ux16(v)
case OpRsh32Ux32:
return rewriteValueARM64_OpRsh32Ux32(v)
case OpRsh32Ux64:
return rewriteValueARM64_OpRsh32Ux64(v)
case OpRsh32Ux8:
return rewriteValueARM64_OpRsh32Ux8(v)
case OpRsh32x16:
return rewriteValueARM64_OpRsh32x16(v)
case OpRsh32x32:
return rewriteValueARM64_OpRsh32x32(v)
case OpRsh32x64:
return rewriteValueARM64_OpRsh32x64(v)
case OpRsh32x8:
return rewriteValueARM64_OpRsh32x8(v)
case OpRsh64Ux16:
return rewriteValueARM64_OpRsh64Ux16(v)
case OpRsh64Ux32:
return rewriteValueARM64_OpRsh64Ux32(v)
case OpRsh64Ux64:
return rewriteValueARM64_OpRsh64Ux64(v)
case OpRsh64Ux8:
return rewriteValueARM64_OpRsh64Ux8(v)
case OpRsh64x16:
return rewriteValueARM64_OpRsh64x16(v)
case OpRsh64x32:
return rewriteValueARM64_OpRsh64x32(v)
case OpRsh64x64:
return rewriteValueARM64_OpRsh64x64(v)
case OpRsh64x8:
return rewriteValueARM64_OpRsh64x8(v)
case OpRsh8Ux16:
return rewriteValueARM64_OpRsh8Ux16(v)
case OpRsh8Ux32:
return rewriteValueARM64_OpRsh8Ux32(v)
case OpRsh8Ux64:
return rewriteValueARM64_OpRsh8Ux64(v)
case OpRsh8Ux8:
return rewriteValueARM64_OpRsh8Ux8(v)
case OpRsh8x16:
return rewriteValueARM64_OpRsh8x16(v)
case OpRsh8x32:
return rewriteValueARM64_OpRsh8x32(v)
case OpRsh8x64:
return rewriteValueARM64_OpRsh8x64(v)
case OpRsh8x8:
return rewriteValueARM64_OpRsh8x8(v)
case OpSelect0:
return rewriteValueARM64_OpSelect0(v)
case OpSelect1:
return rewriteValueARM64_OpSelect1(v)
case OpSignExt16to32:
v.Op = OpARM64MOVHreg
return true
case OpSignExt16to64:
v.Op = OpARM64MOVHreg
return true
case OpSignExt32to64:
v.Op = OpARM64MOVWreg
return true
case OpSignExt8to16:
v.Op = OpARM64MOVBreg
return true
case OpSignExt8to32:
v.Op = OpARM64MOVBreg
return true
case OpSignExt8to64:
v.Op = OpARM64MOVBreg
return true
case OpSlicemask:
return rewriteValueARM64_OpSlicemask(v)
case OpSqrt:
v.Op = OpARM64FSQRTD
return true
case OpStaticCall:
v.Op = OpARM64CALLstatic
return true
case OpStore:
return rewriteValueARM64_OpStore(v)
case OpSub16:
v.Op = OpARM64SUB
return true
case OpSub32:
v.Op = OpARM64SUB
return true
case OpSub32F:
v.Op = OpARM64FSUBS
return true
case OpSub64:
v.Op = OpARM64SUB
return true
case OpSub64F:
v.Op = OpARM64FSUBD
return true
case OpSub8:
v.Op = OpARM64SUB
return true
case OpSubPtr:
v.Op = OpARM64SUB
return true
case OpTrunc:
v.Op = OpARM64FRINTZD
return true
case OpTrunc16to8:
v.Op = OpCopy
return true
case OpTrunc32to16:
v.Op = OpCopy
return true
case OpTrunc32to8:
v.Op = OpCopy
return true
case OpTrunc64to16:
v.Op = OpCopy
return true
case OpTrunc64to32:
v.Op = OpCopy
return true
case OpTrunc64to8:
v.Op = OpCopy
return true
case OpWB:
v.Op = OpARM64LoweredWB
return true
case OpXor16:
v.Op = OpARM64XOR
return true
case OpXor32:
v.Op = OpARM64XOR
return true
case OpXor64:
v.Op = OpARM64XOR
return true
case OpXor8:
v.Op = OpARM64XOR
return true
case OpZero:
return rewriteValueARM64_OpZero(v)
case OpZeroExt16to32:
v.Op = OpARM64MOVHUreg
return true
case OpZeroExt16to64:
v.Op = OpARM64MOVHUreg
return true
case OpZeroExt32to64:
v.Op = OpARM64MOVWUreg
return true
case OpZeroExt8to16:
v.Op = OpARM64MOVBUreg
return true
case OpZeroExt8to32:
v.Op = OpARM64MOVBUreg
return true
case OpZeroExt8to64:
v.Op = OpARM64MOVBUreg
return true
}
// No case matched: no rewrite applied to v.
return false
}
// rewriteValueARM64_OpARM64ADCSflags applies the rewrite rules for
// OpARM64ADCSflags (add-with-carry, setting flags). Rules are tried in
// order; on a match v is mutated in place and the function returns true.
func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool {
// Args are captured up front; rules below pattern-match on them.
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c))))
// result: (ADCSflags x y c)
for {
x := v_0
y := v_1
// Third arg must be the flags result (Select1) of ADDSconstflags [-1].
if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
break
}
v_2_0 := v_2.Args[0]
if v_2_0.Op != OpARM64ADDSconstflags || v_2_0.AuxInt != -1 {
break
}
// Innermost value must be an ADCzerocarry materializing carry c;
// per the rule above, the recomputed carry collapses to c itself.
v_2_0_0 := v_2_0.Args[0]
if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 {
break
}
c := v_2_0_0.Args[0]
v.reset(OpARM64ADCSflags)
v.AddArg3(x, y, c)
return true
}
// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0]))))
// result: (ADDSflags x y)
for {
x := v_0
y := v_1
if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
break
}
v_2_0 := v_2.Args[0]
if v_2_0.Op != OpARM64ADDSconstflags || v_2_0.AuxInt != -1 {
break
}
// Carry input is the constant 0, so the carry chain is dropped and
// the op becomes a plain flag-setting add, per the rule above.
v_2_0_0 := v_2_0.Args[0]
if v_2_0_0.Op != OpARM64MOVDconst || v_2_0_0.AuxInt != 0 {
break
}
v.reset(OpARM64ADDSflags)
v.AddArg2(x, y)
return true
}
// No rule matched.
return false
}
func rewriteValueARM64_OpARM64ADD(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (ADD x (MOVDconst [c]))
// result: (ADDconst [c] x)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
c := v_1.AuxInt
v.reset(OpARM64ADDconst)
v.AuxInt = c
v.AddArg(x)
return true
}
break
}
// match: (ADD a l:(MUL x y))
// cond: l.Uses==1 && clobber(l)
// result: (MADD a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MUL {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MADD)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MNEG x y))
// cond: l.Uses==1 && clobber(l)
// result: (MSUB a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MNEG {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MSUB)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MULW x y))
// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
// result: (MADDW a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MULW {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MADDW)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MNEGW x y))
// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
// result: (MSUBW a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MNEGW {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MSUBW)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD x (NEG y))
// result: (SUB x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64NEG {
continue
}
y := v_1.Args[0]
v.reset(OpARM64SUB)
v.AddArg2(x, y)
return true
}
break
}
// match: (ADD x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftLL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
continue
}
c := x1.AuxInt
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftLL)
v.AuxInt = c
v.AddArg2(x0, y)
return true
}
break
}
// match: (ADD x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftRL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
continue
}
c := x1.AuxInt
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftRL)
v.AuxInt = c
v.AddArg2(x0, y)
return true
}
break
}
// match: (ADD x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftRA x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
continue
}
c := x1.AuxInt
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftRA)
v.AuxInt = c
v.AddArg2(x0, y)
return true
}
break
}
// match: (ADD (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
// cond: cc == OpARM64LessThanU
// result: (ROR x (NEG <t> y))
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpARM64SLL {
continue
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARM64ANDconst {
continue
}
t := v_0_1.Type
if v_0_1.AuxInt != 63 {
continue
}
y := v_0_1.Args[0]
if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
continue
}
cc := v_1.Aux
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 {
continue
}
_ = v_1_0.Args[1]
if x != v_1_0.Args[0] {
continue
}
v_1_0_1 := v_1_0.Args[1]
if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
continue
}
_ = v_1_0_1.Args[1]
v_1_0_1_0 := v_1_0_1.Args[0]
if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 {
continue
}
v_1_0_1_1 := v_1_0_1.Args[1]
if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] {
continue
}
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
continue
}
v_1_1_0 := v_1_1.Args[0]
if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
continue
}
_ = v_1_1_0.Args[1]
v_1_1_0_0 := v_1_1_0.Args[0]
if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 {
continue
}
v_1_1_0_1 := v_1_1_0.Args[1]
if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
continue
}
v.reset(OpARM64ROR)
v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
v0.AddArg(y)
v.AddArg2(x, v0)
return true
}
break
}
// match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
// cond: cc == OpARM64LessThanU
// result: (ROR x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 {
continue
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARM64ANDconst {
continue
}
t := v_0_1.Type
if v_0_1.AuxInt != 63 {
continue
}
y := v_0_1.Args[0]
if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
continue
}
cc := v_1.Aux
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64SLL {
continue
}
_ = v_1_0.Args[1]
if x != v_1_0.Args[0] {
continue
}
v_1_0_1 := v_1_0.Args[1]
if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
continue
}
_ = v_1_0_1.Args[1]
v_1_0_1_0 := v_1_0_1.Args[0]
if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 {
continue
}
v_1_0_1_1 := v_1_0_1.Args[1]
if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] {
continue
}
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
continue
}
v_1_1_0 := v_1_1.Args[0]
if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
continue
}
_ = v_1_1_0.Args[1]
v_1_1_0_0 := v_1_1_0.Args[0]
if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 {
continue
}
v_1_1_0_1 := v_1_1_0.Args[1]
if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
continue
}
v.reset(OpARM64ROR)
v.AddArg2(x, y)
return true
}
break
}
// match: (ADD (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
// cond: cc == OpARM64LessThanU
// result: (RORW x (NEG <t> y))
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpARM64SLL {
continue
}
_ = v_0.Args[1]
x := v_0.Args[0]
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARM64ANDconst {
continue
}
t := v_0_1.Type
if v_0_1.AuxInt != 31 {
continue
}
y := v_0_1.Args[0]
if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
continue
}
cc := v_1.Aux
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 {
continue
}
_ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] {
continue
}
v_1_0_1 := v_1_0.Args[1]
if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
continue
}
_ = v_1_0_1.Args[1]
v_1_0_1_0 := v_1_0_1.Args[0]
if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 {
continue
}
v_1_0_1_1 := v_1_0_1.Args[1]
if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] {
continue
}
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
continue
}
v_1_1_0 := v_1_1.Args[0]
if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
continue
}
_ = v_1_1_0.Args[1]
v_1_1_0_0 := v_1_1_0.Args[0]
if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 {
continue
}
v_1_1_0_1 := v_1_1_0.Args[1]
if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
continue
}
v.reset(OpARM64RORW)
v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
v0.AddArg(y)
v.AddArg2(x, v0)
return true
}
break
}
// match: (ADD (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
// cond: cc == OpARM64LessThanU
// result: (RORW x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 {
continue
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpARM64MOVWUreg {
continue
}
x := v_0_0.Args[0]
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARM64ANDconst {
continue
}
t := v_0_1.Type
if v_0_1.AuxInt != 31 {
continue
}
y := v_0_1.Args[0]
if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
continue
}
cc := v_1.Aux
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
if v_1_0.Op != OpARM64SLL {
continue
}
_ = v_1_0.Args[1]
if x != v_1_0.Args[0] {
continue
}
v_1_0_1 := v_1_0.Args[1]
if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
continue
}
_ = v_1_0_1.Args[1]
v_1_0_1_0 := v_1_0_1.Args[0]
if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 {
continue
}
v_1_0_1_1 := v_1_0_1.Args[1]
if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] {
continue
}
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
continue
}
v_1_1_0 := v_1_1.Args[0]
if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
continue
}
_ = v_1_1_0.Args[1]
v_1_1_0_0 := v_1_1_0.Args[0]
if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 {
continue
}
v_1_1_0_1 := v_1_1_0.Args[1]
if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc == OpARM64LessThanU) {
continue
}
v.reset(OpARM64RORW)
v.AddArg2(x, y)
return true
}
break
}
return false
}
// rewriteValueARM64_OpARM64ADDconst folds ADDconst with address and constant
// operands (MOVDaddr, MOVDconst, ADDconst, SUBconst) and eliminates the
// identity (ADDconst [0] x). It reports whether v was rewritten.
// Generated from gen/ARM64.rules; each match/cond/result comment below
// documents one rule. Do not hand-edit the code.
func rewriteValueARM64_OpARM64ADDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond: is32Bit(off1+int64(off2))
	// result: (MOVDaddr [int32(off1)+off2] {sym} ptr)
	for {
		off1 := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + int64(off2))) {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = int32ToAuxInt(int32(off1) + off2)
		v.Aux = symToAux(sym)
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [0] x)
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c+d])
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// result: (ADDconst [c+d] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// result: (ADDconst [c-d] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL simplifies ADDshiftLL values: it folds
// constant operands into ADDconst and recognizes shift-pair idioms that form
// rotates (RORconst/RORWconst), byte-reversal (REV16W), and extract
// (EXTRconst/EXTRWconst) instructions. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x)
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if x != v_1 || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
	// result: (REV16W x)
	for {
		if v.Type != typ.UInt16 || v.AuxInt != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || v_0.AuxInt != armBFAuxInt(8, 8) {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64REV16W)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v_1
		v.reset(OpARM64EXTRconst)
		v.AuxInt = 64 - c
		v.AddArg2(x2, x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := v.AuxInt
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		x2 := v_1
		if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = 32 - c
		v.AddArg2(x2, x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA folds constant operands of ADDshiftRA
// into ADDconst (materializing the arithmetic shift when the constant is on
// the left). It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// result: (ADDconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL folds constant operands of ADDshiftRL
// into ADDconst and recognizes the shift-pair idioms that form rotate
// instructions (RORconst, and RORWconst for 32-bit values). It reports
// whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL [c] (SLLconst x [64-c]) x)
	// result: (RORconst [ c] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v_1 {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [c] x)
	for {
		t := v.Type
		c := v.AuxInt
		if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 32-c {
			break
		}
		x := v_0.Args[0]
		if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND simplifies AND values: constant operands
// become ANDconst, (AND x x) collapses to x, (AND x (MVN y)) becomes BIC,
// and shifted operands fold into the ANDshift* forms. The inner _i0 loops
// try both operand orders for commutative matching. It reports whether v
// was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND x (MOVDconst [c]))
	// result: (ANDconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := v_1.AuxInt
			v.reset(OpARM64ANDconst)
			v.AuxInt = c
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (AND x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (AND x (MVN y))
	// result: (BIC x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MVN {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpARM64BIC)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftLL)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRL)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (AND x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64ANDshiftRA)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst constant-folds ANDconst values:
// identities ([0] and [-1]), folding through MOVDconst/ANDconst and the
// zero-extension ops (MOVWUreg/MOVHUreg/MOVBUreg), and conversion of
// shift-then-mask patterns into the bitfield ops UBFIZ/UBFX when the mask
// satisfies isARM64BFMask. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDconst [0] _)
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// result: (MOVDconst [c&d])
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVWUreg x))
	// result: (ANDconst [c&(1<<32-1)] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<32 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVHUreg x))
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<16 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVBUreg x))
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		c := v.AuxInt
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<8 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		ac := v.AuxInt
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, sc))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		ac := v.AuxInt
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, 0))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftLL folds constant operands of ANDshiftLL
// into ANDconst and collapses (ANDshiftLL x (SLLconst x [c]) [d]) to the
// shifted operand when the shift amounts agree. It reports whether v was
// rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v_0
		y := v_1
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] || !(c == d) {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA folds constant operands of ANDshiftRA
// into ANDconst and collapses (ANDshiftRA x (SRAconst x [c]) [d]) to the
// shifted operand when the shift amounts agree. It reports whether v was
// rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v_0
		y := v_1
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] || !(c == d) {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL folds constant operands of ANDshiftRL
// into ANDconst and collapses (ANDshiftRL x (SRLconst x [c]) [d]) to the
// shifted operand when the shift amounts agree. It reports whether v was
// rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL x y:(SRLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v_0
		y := v_1
		if y.Op != OpARM64SRLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] || !(c == d) {
			break
		}
		v.copyOf(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC simplifies BIC (bit-clear, x &^ y) values:
// a constant mask becomes ANDconst with the complemented constant,
// (BIC x x) is zero, and shifted second operands fold into the BICshift*
// forms. BIC is not commutative, so no operand-order loop is generated.
// It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64BIC(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BIC x (MOVDconst [c]))
	// result: (ANDconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BIC x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (BIC x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftLL folds a constant second operand of
// BICshiftLL into ANDconst and reduces (BICshiftLL x (SLLconst x [c]) [d])
// with matching shift amounts to zero. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRA folds a constant second operand of
// BICshiftRA into ANDconst and reduces (BICshiftRA x (SRAconst x [c]) [d])
// with matching shift amounts to zero. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL folds a constant second operand of
// BICshiftRL into ANDconst and reduces (BICshiftRL x (SRLconst x [c]) [d])
// with matching shift amounts to zero. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMN simplifies CMN (compare negative) values:
// constant operands become CMNconst and shifted operands fold into the
// CMNshift* forms. The inner _i0 loops try both operand orders for
// commutative matching. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMN(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMN x (MOVDconst [c]))
	// result: (CMNconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := v_1.AuxInt
			v.reset(OpARM64CMNconst)
			v.AuxInt = c
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SLLconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftLL)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRLconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRL)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	// match: (CMN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x0 := v_0
			x1 := v_1
			if x1.Op != OpARM64SRAconst {
				continue
			}
			c := x1.AuxInt
			y := x1.Args[0]
			if !(clobberIfDead(x1)) {
				continue
			}
			v.reset(OpARM64CMNshiftRA)
			v.AuxInt = c
			v.AddArg2(x0, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNW folds a constant operand of the 32-bit
// compare-negative CMNW into CMNWconst, trying both operand orders.
// It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CMNW x (MOVDconst [c]))
	// result: (CMNWconst [c] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpARM64MOVDconst {
				continue
			}
			c := v_1.AuxInt
			v.reset(OpARM64CMNWconst)
			v.AuxInt = c
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64CMNWconst constant-folds a 32-bit CMNWconst of a
// constant into a FlagConstant computed by addFlags32. It reports whether
// v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNconst constant-folds a 64-bit CMNconst of a
// constant into a FlagConstant computed by addFlags64. It reports whether
// v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMNconst (MOVDconst [x]) [y])
	// result: (FlagConstant [addFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(addFlags64(x, y))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftLL folds constant operands of CMNshiftLL
// into CMNconst (materializing the shift when the constant is on the left).
// It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftLL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftLL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRA folds constant operands of CMNshiftRA
// into CMNconst (materializing the arithmetic shift when the constant is on
// the left). It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRA (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRA x (MOVDconst [c]) [d])
	// result: (CMNconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNshiftRL folds constant operands of CMNshiftRL
// into CMNconst (materializing the logical shift when the constant is on the
// left). It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMNshiftRL (MOVDconst [c]) x [d])
	// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRL x (MOVDconst [c]) [d])
	// result: (CMNconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMP simplifies CMP values: constant operands
// become CMPconst, operands are canonicalized by value ID (wrapping in
// InvertFlags when swapped, since CMP is not commutative), and shifted
// operands fold into the CMPshift* forms — again via InvertFlags when the
// shift is the first operand. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMP(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMP x (MOVDconst [c]))
	// result: (CMPconst [c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// result: (InvertFlags (CMPconst [c] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x y)
	// cond: x.ID > y.ID
	// result: (InvertFlags (CMP y x))
	for {
		x := v_0
		y := v_1
		if !(x.ID > y.ID) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SLLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftLL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SLLconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRL x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRLconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (CMP x0:(SRAconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRA x1 y [c]))
	for {
		x0 := v_0
		if x0.Op != OpARM64SRAconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v_1
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg2(x1, y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW simplifies the 32-bit CMPW: constant
// operands become CMPWconst (truncated to int32), and operands are
// canonicalized by value ID with an InvertFlags wrapper since the compare
// is not commutative. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMPW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPW x (MOVDconst [c]))
	// result: (CMPWconst [int32(c)] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int32ToAuxInt(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// result: (InvertFlags (CMPWconst [int32(c)] x))
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = int32ToAuxInt(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMPW x y)
	// cond: x.ID > y.ID
	// result: (InvertFlags (CMPW y x))
	for {
		x := v_0
		y := v_1
		if !(x.ID > y.ID) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst constant-folds 32-bit CMPWconst: a
// constant operand becomes a FlagConstant via subFlags32, and comparisons
// of a zero-extended byte/halfword against a constant larger than its
// maximum value are resolved statically. It reports whether v was
// rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPWconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags32(int32(x),y)])
	for {
		y := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y))
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt32(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst constant-folds 64-bit CMPconst: a
// constant operand becomes a FlagConstant via subFlags64, and comparisons
// whose operand is provably smaller than the constant (zero-extensions,
// small AND masks, or right shifts that bound the value) are resolved
// statically. It reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the code.
func rewriteValueARM64_OpARM64CMPconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CMPconst (MOVDconst [x]) [y])
	// result: (FlagConstant [subFlags64(x,y)])
	for {
		y := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(x, y))
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := auxIntToInt64(v_0.AuxInt)
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagConstant [subFlags64(0,1)])
	for {
		n := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagConstant)
		v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL applies rewrite rules to a CMPshiftLL
// value (compare x with y<<d). Constant operands are folded into CMPconst,
// inserting InvertFlags when the constant is on the left. Reports whether a
// rewrite fired. Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		// Swapping the operands of the compare requires inverting the
		// resulting flags.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA applies rewrite rules to a CMPshiftRA
// value (compare x with y>>d, arithmetic shift). Constant operands are folded
// into CMPconst, inserting InvertFlags when the constant is on the left.
// Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// result: (CMPconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		// Signed shift of the signed constant mirrors SRA semantics.
		v.reset(OpARM64CMPconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL applies rewrite rules to a CMPshiftRL
// value (compare x with y>>d, logical shift). Constant operands are folded
// into CMPconst, inserting InvertFlags when the constant is on the left.
// Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v_1
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		// Unsigned shift of the constant mirrors SRL semantics.
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL applies rewrite rules to a CSEL (conditional
// select) value: it degrades to CSEL0 when either data operand is the
// constant 0, absorbs InvertFlags by inverting the condition, collapses to a
// single operand when the flags are statically decidable, and fuses a
// (CMPWconst [0] boolval) flag producer into the select's own condition.
// Reports whether a rewrite fired.
// NOTE(review): the first two rules read/write v.Aux directly while the
// later ones go through auxToCCop/cCopToAux — this mixed access matches the
// generator's output at this vintage.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64CSEL(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL {cc} x (MOVDconst [0]) flag)
	// result: (CSEL0 {cc} x flag)
	for {
		cc := v.Aux
		x := v_0
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 {
			break
		}
		flag := v_2
		v.reset(OpARM64CSEL0)
		v.Aux = cc
		v.AddArg2(x, flag)
		return true
	}
	// match: (CSEL {cc} (MOVDconst [0]) y flag)
	// result: (CSEL0 {arm64Negate(cc.(Op))} y flag)
	for {
		cc := v.Aux
		if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 0 {
			break
		}
		y := v_1
		flag := v_2
		// The zero moves to the "condition false" slot, so the
		// condition must be negated.
		v.reset(OpARM64CSEL0)
		v.Aux = arm64Negate(cc.(Op))
		v.AddArg2(y, flag)
		return true
	}
	// match: (CSEL {cc} x y (InvertFlags cmp))
	// result: (CSEL {arm64Invert(cc)} x y cmp)
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSEL)
		v.Aux = cCopToAux(arm64Invert(cc))
		v.AddArg3(x, y, cmp)
		return true
	}
	// match: (CSEL {cc} x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		flag := v_2
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		// Condition statically true: the select is just x.
		v.copyOf(x)
		return true
	}
	// match: (CSEL {cc} _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: y
	for {
		cc := auxToCCop(v.Aux)
		y := v_1
		flag := v_2
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		// Condition statically false: the select is just y.
		v.copyOf(y)
		return true
	}
	// match: (CSEL {cc} x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL {boolval.Op} x y flagArg(boolval))
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		// Selecting on "boolval != 0" is the same as selecting on
		// boolval's own flag-producing comparison.
		v.reset(OpARM64CSEL)
		v.Aux = cCopToAux(boolval.Op)
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	// match: (CSEL {cc} x y (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL {arm64Negate(boolval.Op)} x y flagArg(boolval))
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		y := v_1
		if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
			break
		}
		boolval := v_2.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		// "boolval == 0" selects on the negation of boolval's
		// comparison.
		v.reset(OpARM64CSEL)
		v.Aux = cCopToAux(arm64Negate(boolval.Op))
		v.AddArg3(x, y, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL0 applies rewrite rules to a CSEL0 value
// (conditional select between x and the constant 0): it absorbs InvertFlags
// by inverting the condition, collapses to x or to (MOVDconst [0]) when the
// flags are statically decidable, and fuses a (CMPWconst [0] boolval) flag
// producer into its own condition. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64CSEL0(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (CSEL0 {cc} x (InvertFlags cmp))
	// result: (CSEL0 {arm64Invert(cc)} x cmp)
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		if v_1.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_1.Args[0]
		v.reset(OpARM64CSEL0)
		v.Aux = cCopToAux(arm64Invert(cc))
		v.AddArg2(x, cmp)
		return true
	}
	// match: (CSEL0 {cc} x flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		flag := v_1
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		// Condition statically true: the select is just x.
		v.copyOf(x)
		return true
	}
	// match: (CSEL0 {cc} _ flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	for {
		cc := auxToCCop(v.Aux)
		flag := v_1
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		// Condition statically false: the select is the constant 0.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (CSEL0 {cc} x (CMPWconst [0] boolval))
	// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
	// result: (CSEL0 {boolval.Op} x flagArg(boolval))
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.Aux = cCopToAux(boolval.Op)
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	// match: (CSEL0 {cc} x (CMPWconst [0] boolval))
	// cond: cc == OpARM64Equal && flagArg(boolval) != nil
	// result: (CSEL0 {arm64Negate(boolval.Op)} x flagArg(boolval))
	for {
		cc := auxToCCop(v.Aux)
		x := v_0
		if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
			break
		}
		boolval := v_1.Args[0]
		if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.Aux = cCopToAux(arm64Negate(boolval.Op))
		v.AddArg2(x, flagArg(boolval))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIV applies rewrite rules to a DIV value:
// a division of two MOVDconst operands is folded into a single MOVDconst.
// Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; the authoritative form of the d != 0
// guard below belongs in that rules file.
func rewriteValueARM64_OpARM64DIV(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [c/d])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Guard against a constant zero divisor: without this check
		// the compiler itself panics with an integer divide by zero
		// when such a DIV appears (e.g. in dead code). Leave the
		// value unrewritten instead.
		if d == 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c / d
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIVW applies rewrite rules to a DIVW value:
// a 32-bit signed division of two MOVDconst operands is folded into a
// single MOVDconst. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; the authoritative form of the
// nonzero-divisor guard below belongs in that rules file.
func rewriteValueARM64_OpARM64DIVW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: int32(d) != 0
	// result: (MOVDconst [int64(int32(c)/int32(d))])
	for {
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Guard against a zero divisor after truncation to 32 bits:
		// folding would panic the compiler with an integer divide by
		// zero (possible for DIVW values in dead code). The check is
		// on int32(d), not d, because that is the value divided by.
		if int32(d) == 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) / int32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EON applies rewrite rules to an EON value:
// a constant second operand folds into XORconst with the complemented
// constant, EON of a value with itself folds to -1, and a shift-by-constant
// second operand (if otherwise dead) fuses into the corresponding
// EONshift* form. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64EON(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EON x (MOVDconst [c]))
	// result: (XORconst [^c] x)
	for {
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (EON x x)
	// result: (MOVDconst [-1])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (EON x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftLL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		// clobberIfDead only permits the fuse when x1 has no other
		// uses that would still need the standalone shift.
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftLL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRL x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRL)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	// match: (EON x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRA x0 y [c])
	for {
		x0 := v_0
		x1 := v_1
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRA)
		v.AuxInt = c
		v.AddArg2(x0, y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftLL applies rewrite rules to an EONshiftLL
// value (x EON (y << d)): a constant second operand folds into XORconst with
// the complemented shifted constant, and matching the same operand with an
// equal shift amount folds to -1. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftLL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		// Both sides are the same shifted value, so the EON of a
		// value with itself is all ones.
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRA applies rewrite rules to an EONshiftRA
// value (x EON (y >> d), arithmetic shift): a constant second operand folds
// into XORconst with the complemented shifted constant, and matching the same
// operand with an equal shift amount folds to -1. Reports whether a rewrite
// fired. Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRA x (MOVDconst [c]) [d])
	// result: (XORconst x [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		// Signed shift of c mirrors SRA semantics before complement.
		v.reset(OpARM64XORconst)
		v.AuxInt = ^(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRL applies rewrite rules to an EONshiftRL
// value (x EON (y >> d), logical shift): a constant second operand folds into
// XORconst with the complemented shifted constant, and matching the same
// operand with an equal shift amount folds to -1. Reports whether a rewrite
// fired. Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (EONshiftRL x (MOVDconst [c]) [d])
	// result: (XORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		// Unsigned shift of c mirrors SRL semantics before complement.
		v.reset(OpARM64XORconst)
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		x := v_0
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64Equal applies rewrite rules to an Equal value:
// a FlagConstant argument folds to the 0/1 constant of its eq bit, and an
// InvertFlags argument is absorbed (equality is symmetric, so the condition
// is unchanged). Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64Equal(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Equal (FlagConstant [fc]))
	// result: (MOVDconst [b2i(fc.eq())])
	for {
		if v_0.Op != OpARM64FlagConstant {
			break
		}
		fc := auxIntToFlagConstant(v_0.AuxInt)
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
		return true
	}
	// match: (Equal (InvertFlags x))
	// result: (Equal x)
	for {
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FADDD applies rewrite rules to an FADDD value,
// fusing a float64 multiply-then-add into FMADDD and a negated-multiply-
// then-add into FMSUBD. The inner _i0 loop tries both operand orders since
// addition is commutative. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// result: (FMADDD a x y)
	for {
		// Try both commutations of the FADDD operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			v.reset(OpARM64FMADDD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDD a (FNMULD x y))
	// result: (FMSUBD a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			v.reset(OpARM64FMSUBD)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FADDS applies rewrite rules to an FADDS value,
// fusing a float32 multiply-then-add into FMADDS and a negated-multiply-
// then-add into FMSUBS. The inner _i0 loop tries both operand orders since
// addition is commutative. Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// result: (FMADDS a x y)
	for {
		// Try both commutations of the FADDS operands.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			v.reset(OpARM64FMADDS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	// match: (FADDS a (FNMULS x y))
	// result: (FMSUBS a x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpARM64FNMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			v.reset(OpARM64FMSUBS)
			v.AddArg3(a, x, y)
			return true
		}
		break
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPD applies rewrite rules to an FCMPD value:
// a compare against the float64 constant 0 becomes the dedicated FCMPD0
// form, with InvertFlags inserted when the zero is the left operand.
// Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FCMPD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPD x (FMOVDconst [0]))
	// result: (FCMPD0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVDconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPD0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPD (FMOVDconst [0]) x)
	// result: (InvertFlags (FCMPD0 x))
	for {
		if v_0.Op != OpARM64FMOVDconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		// Zero on the left: swap operands and invert the flags.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FCMPS applies rewrite rules to an FCMPS value:
// a compare against the float32 constant 0 becomes the dedicated FCMPS0
// form, with InvertFlags inserted when the zero is the left operand.
// Reports whether a rewrite fired.
// Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FCMPS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (FCMPS x (FMOVSconst [0]))
	// result: (FCMPS0 x)
	for {
		x := v_0
		if v_1.Op != OpARM64FMOVSconst || auxIntToFloat64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpARM64FCMPS0)
		v.AddArg(x)
		return true
	}
	// match: (FCMPS (FMOVSconst [0]) x)
	// result: (InvertFlags (FCMPS0 x))
	for {
		if v_0.Op != OpARM64FMOVSconst || auxIntToFloat64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		// Zero on the left: swap operands and invert the flags.
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDfpgp applies rewrite rules to an FMOVDfpgp
// value (FP->GP register move): a move of an Arg is replaced by a new Arg of
// the moved-to type, placed in the function's entry block. Reports whether a
// rewrite fired. Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FMOVDfpgp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		// Args live in the entry block; build the replacement there.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDgpfp applies rewrite rules to an FMOVDgpfp
// value (GP->FP register move): a move of an Arg is replaced by a new Arg of
// the moved-to type, placed in the function's entry block. Reports whether a
// rewrite fired. Code generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64FMOVDgpfp(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		if v_0.Op != OpArg {
			break
		}
		off := auxIntToInt32(v_0.AuxInt)
		sym := auxToSym(v_0.Aux)
		// Args live in the entry block; build the replacement there.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64FMOVDload(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
config := b.Func.Config
// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
// result: (FMOVDgpfp val)
for {
off := auxIntToInt32(v.AuxInt)