// Code generated from _gen/ARM64.rules using 'go generate'; DO NOT EDIT.

package ssa

import "cmd/compile/internal/types"
func rewriteValueARM64(v *Value) bool {
switch v.Op {
case OpARM64ADCSflags:
return rewriteValueARM64_OpARM64ADCSflags(v)
case OpARM64ADD:
return rewriteValueARM64_OpARM64ADD(v)
case OpARM64ADDSflags:
return rewriteValueARM64_OpARM64ADDSflags(v)
case OpARM64ADDconst:
return rewriteValueARM64_OpARM64ADDconst(v)
case OpARM64ADDshiftLL:
return rewriteValueARM64_OpARM64ADDshiftLL(v)
case OpARM64ADDshiftRA:
return rewriteValueARM64_OpARM64ADDshiftRA(v)
case OpARM64ADDshiftRL:
return rewriteValueARM64_OpARM64ADDshiftRL(v)
case OpARM64AND:
return rewriteValueARM64_OpARM64AND(v)
case OpARM64ANDconst:
return rewriteValueARM64_OpARM64ANDconst(v)
case OpARM64ANDshiftLL:
return rewriteValueARM64_OpARM64ANDshiftLL(v)
case OpARM64ANDshiftRA:
return rewriteValueARM64_OpARM64ANDshiftRA(v)
case OpARM64ANDshiftRL:
return rewriteValueARM64_OpARM64ANDshiftRL(v)
case OpARM64ANDshiftRO:
return rewriteValueARM64_OpARM64ANDshiftRO(v)
case OpARM64BIC:
return rewriteValueARM64_OpARM64BIC(v)
case OpARM64BICshiftLL:
return rewriteValueARM64_OpARM64BICshiftLL(v)
case OpARM64BICshiftRA:
return rewriteValueARM64_OpARM64BICshiftRA(v)
case OpARM64BICshiftRL:
return rewriteValueARM64_OpARM64BICshiftRL(v)
case OpARM64BICshiftRO:
return rewriteValueARM64_OpARM64BICshiftRO(v)
case OpARM64CMN:
return rewriteValueARM64_OpARM64CMN(v)
case OpARM64CMNW:
return rewriteValueARM64_OpARM64CMNW(v)
case OpARM64CMNWconst:
return rewriteValueARM64_OpARM64CMNWconst(v)
case OpARM64CMNconst:
return rewriteValueARM64_OpARM64CMNconst(v)
case OpARM64CMNshiftLL:
return rewriteValueARM64_OpARM64CMNshiftLL(v)
case OpARM64CMNshiftRA:
return rewriteValueARM64_OpARM64CMNshiftRA(v)
case OpARM64CMNshiftRL:
return rewriteValueARM64_OpARM64CMNshiftRL(v)
case OpARM64CMP:
return rewriteValueARM64_OpARM64CMP(v)
case OpARM64CMPW:
return rewriteValueARM64_OpARM64CMPW(v)
case OpARM64CMPWconst:
return rewriteValueARM64_OpARM64CMPWconst(v)
case OpARM64CMPconst:
return rewriteValueARM64_OpARM64CMPconst(v)
case OpARM64CMPshiftLL:
return rewriteValueARM64_OpARM64CMPshiftLL(v)
case OpARM64CMPshiftRA:
return rewriteValueARM64_OpARM64CMPshiftRA(v)
case OpARM64CMPshiftRL:
return rewriteValueARM64_OpARM64CMPshiftRL(v)
case OpARM64CSEL:
return rewriteValueARM64_OpARM64CSEL(v)
case OpARM64CSEL0:
return rewriteValueARM64_OpARM64CSEL0(v)
case OpARM64CSETM:
return rewriteValueARM64_OpARM64CSETM(v)
case OpARM64CSINC:
return rewriteValueARM64_OpARM64CSINC(v)
case OpARM64CSINV:
return rewriteValueARM64_OpARM64CSINV(v)
case OpARM64CSNEG:
return rewriteValueARM64_OpARM64CSNEG(v)
case OpARM64DIV:
return rewriteValueARM64_OpARM64DIV(v)
case OpARM64DIVW:
return rewriteValueARM64_OpARM64DIVW(v)
case OpARM64EON:
return rewriteValueARM64_OpARM64EON(v)
case OpARM64EONshiftLL:
return rewriteValueARM64_OpARM64EONshiftLL(v)
case OpARM64EONshiftRA:
return rewriteValueARM64_OpARM64EONshiftRA(v)
case OpARM64EONshiftRL:
return rewriteValueARM64_OpARM64EONshiftRL(v)
case OpARM64EONshiftRO:
return rewriteValueARM64_OpARM64EONshiftRO(v)
case OpARM64Equal:
return rewriteValueARM64_OpARM64Equal(v)
case OpARM64FADDD:
return rewriteValueARM64_OpARM64FADDD(v)
case OpARM64FADDS:
return rewriteValueARM64_OpARM64FADDS(v)
case OpARM64FCMPD:
return rewriteValueARM64_OpARM64FCMPD(v)
case OpARM64FCMPS:
return rewriteValueARM64_OpARM64FCMPS(v)
case OpARM64FMOVDfpgp:
return rewriteValueARM64_OpARM64FMOVDfpgp(v)
case OpARM64FMOVDgpfp:
return rewriteValueARM64_OpARM64FMOVDgpfp(v)
case OpARM64FMOVDload:
return rewriteValueARM64_OpARM64FMOVDload(v)
case OpARM64FMOVDloadidx:
return rewriteValueARM64_OpARM64FMOVDloadidx(v)
case OpARM64FMOVDloadidx8:
return rewriteValueARM64_OpARM64FMOVDloadidx8(v)
case OpARM64FMOVDstore:
return rewriteValueARM64_OpARM64FMOVDstore(v)
case OpARM64FMOVDstoreidx:
return rewriteValueARM64_OpARM64FMOVDstoreidx(v)
case OpARM64FMOVDstoreidx8:
return rewriteValueARM64_OpARM64FMOVDstoreidx8(v)
case OpARM64FMOVSload:
return rewriteValueARM64_OpARM64FMOVSload(v)
case OpARM64FMOVSloadidx:
return rewriteValueARM64_OpARM64FMOVSloadidx(v)
case OpARM64FMOVSloadidx4:
return rewriteValueARM64_OpARM64FMOVSloadidx4(v)
case OpARM64FMOVSstore:
return rewriteValueARM64_OpARM64FMOVSstore(v)
case OpARM64FMOVSstoreidx:
return rewriteValueARM64_OpARM64FMOVSstoreidx(v)
case OpARM64FMOVSstoreidx4:
return rewriteValueARM64_OpARM64FMOVSstoreidx4(v)
case OpARM64FMULD:
return rewriteValueARM64_OpARM64FMULD(v)
case OpARM64FMULS:
return rewriteValueARM64_OpARM64FMULS(v)
case OpARM64FNEGD:
return rewriteValueARM64_OpARM64FNEGD(v)
case OpARM64FNEGS:
return rewriteValueARM64_OpARM64FNEGS(v)
case OpARM64FNMULD:
return rewriteValueARM64_OpARM64FNMULD(v)
case OpARM64FNMULS:
return rewriteValueARM64_OpARM64FNMULS(v)
case OpARM64FSUBD:
return rewriteValueARM64_OpARM64FSUBD(v)
case OpARM64FSUBS:
return rewriteValueARM64_OpARM64FSUBS(v)
case OpARM64GreaterEqual:
return rewriteValueARM64_OpARM64GreaterEqual(v)
case OpARM64GreaterEqualF:
return rewriteValueARM64_OpARM64GreaterEqualF(v)
case OpARM64GreaterEqualNoov:
return rewriteValueARM64_OpARM64GreaterEqualNoov(v)
case OpARM64GreaterEqualU:
return rewriteValueARM64_OpARM64GreaterEqualU(v)
case OpARM64GreaterThan:
return rewriteValueARM64_OpARM64GreaterThan(v)
case OpARM64GreaterThanF:
return rewriteValueARM64_OpARM64GreaterThanF(v)
case OpARM64GreaterThanU:
return rewriteValueARM64_OpARM64GreaterThanU(v)
case OpARM64LDP:
return rewriteValueARM64_OpARM64LDP(v)
case OpARM64LessEqual:
return rewriteValueARM64_OpARM64LessEqual(v)
case OpARM64LessEqualF:
return rewriteValueARM64_OpARM64LessEqualF(v)
case OpARM64LessEqualU:
return rewriteValueARM64_OpARM64LessEqualU(v)
case OpARM64LessThan:
return rewriteValueARM64_OpARM64LessThan(v)
case OpARM64LessThanF:
return rewriteValueARM64_OpARM64LessThanF(v)
case OpARM64LessThanNoov:
return rewriteValueARM64_OpARM64LessThanNoov(v)
case OpARM64LessThanU:
return rewriteValueARM64_OpARM64LessThanU(v)
case OpARM64MADD:
return rewriteValueARM64_OpARM64MADD(v)
case OpARM64MADDW:
return rewriteValueARM64_OpARM64MADDW(v)
case OpARM64MNEG:
return rewriteValueARM64_OpARM64MNEG(v)
case OpARM64MNEGW:
return rewriteValueARM64_OpARM64MNEGW(v)
case OpARM64MOD:
return rewriteValueARM64_OpARM64MOD(v)
case OpARM64MODW:
return rewriteValueARM64_OpARM64MODW(v)
case OpARM64MOVBUload:
return rewriteValueARM64_OpARM64MOVBUload(v)
case OpARM64MOVBUloadidx:
return rewriteValueARM64_OpARM64MOVBUloadidx(v)
case OpARM64MOVBUreg:
return rewriteValueARM64_OpARM64MOVBUreg(v)
case OpARM64MOVBload:
return rewriteValueARM64_OpARM64MOVBload(v)
case OpARM64MOVBloadidx:
return rewriteValueARM64_OpARM64MOVBloadidx(v)
case OpARM64MOVBreg:
return rewriteValueARM64_OpARM64MOVBreg(v)
case OpARM64MOVBstore:
return rewriteValueARM64_OpARM64MOVBstore(v)
case OpARM64MOVBstoreidx:
return rewriteValueARM64_OpARM64MOVBstoreidx(v)
case OpARM64MOVBstorezero:
return rewriteValueARM64_OpARM64MOVBstorezero(v)
case OpARM64MOVBstorezeroidx:
return rewriteValueARM64_OpARM64MOVBstorezeroidx(v)
case OpARM64MOVDload:
return rewriteValueARM64_OpARM64MOVDload(v)
case OpARM64MOVDloadidx:
return rewriteValueARM64_OpARM64MOVDloadidx(v)
case OpARM64MOVDloadidx8:
return rewriteValueARM64_OpARM64MOVDloadidx8(v)
case OpARM64MOVDnop:
return rewriteValueARM64_OpARM64MOVDnop(v)
case OpARM64MOVDreg:
return rewriteValueARM64_OpARM64MOVDreg(v)
case OpARM64MOVDstore:
return rewriteValueARM64_OpARM64MOVDstore(v)
case OpARM64MOVDstoreidx:
return rewriteValueARM64_OpARM64MOVDstoreidx(v)
case OpARM64MOVDstoreidx8:
return rewriteValueARM64_OpARM64MOVDstoreidx8(v)
case OpARM64MOVDstorezero:
return rewriteValueARM64_OpARM64MOVDstorezero(v)
case OpARM64MOVDstorezeroidx:
return rewriteValueARM64_OpARM64MOVDstorezeroidx(v)
case OpARM64MOVDstorezeroidx8:
return rewriteValueARM64_OpARM64MOVDstorezeroidx8(v)
case OpARM64MOVHUload:
return rewriteValueARM64_OpARM64MOVHUload(v)
case OpARM64MOVHUloadidx:
return rewriteValueARM64_OpARM64MOVHUloadidx(v)
case OpARM64MOVHUloadidx2:
return rewriteValueARM64_OpARM64MOVHUloadidx2(v)
case OpARM64MOVHUreg:
return rewriteValueARM64_OpARM64MOVHUreg(v)
case OpARM64MOVHload:
return rewriteValueARM64_OpARM64MOVHload(v)
case OpARM64MOVHloadidx:
return rewriteValueARM64_OpARM64MOVHloadidx(v)
case OpARM64MOVHloadidx2:
return rewriteValueARM64_OpARM64MOVHloadidx2(v)
case OpARM64MOVHreg:
return rewriteValueARM64_OpARM64MOVHreg(v)
case OpARM64MOVHstore:
return rewriteValueARM64_OpARM64MOVHstore(v)
case OpARM64MOVHstoreidx:
return rewriteValueARM64_OpARM64MOVHstoreidx(v)
case OpARM64MOVHstoreidx2:
return rewriteValueARM64_OpARM64MOVHstoreidx2(v)
case OpARM64MOVHstorezero:
return rewriteValueARM64_OpARM64MOVHstorezero(v)
case OpARM64MOVHstorezeroidx:
return rewriteValueARM64_OpARM64MOVHstorezeroidx(v)
case OpARM64MOVHstorezeroidx2:
return rewriteValueARM64_OpARM64MOVHstorezeroidx2(v)
case OpARM64MOVQstorezero:
return rewriteValueARM64_OpARM64MOVQstorezero(v)
case OpARM64MOVWUload:
return rewriteValueARM64_OpARM64MOVWUload(v)
case OpARM64MOVWUloadidx:
return rewriteValueARM64_OpARM64MOVWUloadidx(v)
case OpARM64MOVWUloadidx4:
return rewriteValueARM64_OpARM64MOVWUloadidx4(v)
case OpARM64MOVWUreg:
return rewriteValueARM64_OpARM64MOVWUreg(v)
case OpARM64MOVWload:
return rewriteValueARM64_OpARM64MOVWload(v)
case OpARM64MOVWloadidx:
return rewriteValueARM64_OpARM64MOVWloadidx(v)
case OpARM64MOVWloadidx4:
return rewriteValueARM64_OpARM64MOVWloadidx4(v)
case OpARM64MOVWreg:
return rewriteValueARM64_OpARM64MOVWreg(v)
case OpARM64MOVWstore:
return rewriteValueARM64_OpARM64MOVWstore(v)
case OpARM64MOVWstoreidx:
return rewriteValueARM64_OpARM64MOVWstoreidx(v)
case OpARM64MOVWstoreidx4:
return rewriteValueARM64_OpARM64MOVWstoreidx4(v)
case OpARM64MOVWstorezero:
return rewriteValueARM64_OpARM64MOVWstorezero(v)
case OpARM64MOVWstorezeroidx:
return rewriteValueARM64_OpARM64MOVWstorezeroidx(v)
case OpARM64MOVWstorezeroidx4:
return rewriteValueARM64_OpARM64MOVWstorezeroidx4(v)
case OpARM64MSUB:
return rewriteValueARM64_OpARM64MSUB(v)
case OpARM64MSUBW:
return rewriteValueARM64_OpARM64MSUBW(v)
case OpARM64MUL:
return rewriteValueARM64_OpARM64MUL(v)
case OpARM64MULW:
return rewriteValueARM64_OpARM64MULW(v)
case OpARM64MVN:
return rewriteValueARM64_OpARM64MVN(v)
case OpARM64MVNshiftLL:
return rewriteValueARM64_OpARM64MVNshiftLL(v)
case OpARM64MVNshiftRA:
return rewriteValueARM64_OpARM64MVNshiftRA(v)
case OpARM64MVNshiftRL:
return rewriteValueARM64_OpARM64MVNshiftRL(v)
case OpARM64MVNshiftRO:
return rewriteValueARM64_OpARM64MVNshiftRO(v)
case OpARM64NEG:
return rewriteValueARM64_OpARM64NEG(v)
case OpARM64NEGshiftLL:
return rewriteValueARM64_OpARM64NEGshiftLL(v)
case OpARM64NEGshiftRA:
return rewriteValueARM64_OpARM64NEGshiftRA(v)
case OpARM64NEGshiftRL:
return rewriteValueARM64_OpARM64NEGshiftRL(v)
case OpARM64NotEqual:
return rewriteValueARM64_OpARM64NotEqual(v)
case OpARM64OR:
return rewriteValueARM64_OpARM64OR(v)
case OpARM64ORN:
return rewriteValueARM64_OpARM64ORN(v)
case OpARM64ORNshiftLL:
return rewriteValueARM64_OpARM64ORNshiftLL(v)
case OpARM64ORNshiftRA:
return rewriteValueARM64_OpARM64ORNshiftRA(v)
case OpARM64ORNshiftRL:
return rewriteValueARM64_OpARM64ORNshiftRL(v)
case OpARM64ORNshiftRO:
return rewriteValueARM64_OpARM64ORNshiftRO(v)
case OpARM64ORconst:
return rewriteValueARM64_OpARM64ORconst(v)
case OpARM64ORshiftLL:
return rewriteValueARM64_OpARM64ORshiftLL(v)
case OpARM64ORshiftRA:
return rewriteValueARM64_OpARM64ORshiftRA(v)
case OpARM64ORshiftRL:
return rewriteValueARM64_OpARM64ORshiftRL(v)
case OpARM64ORshiftRO:
return rewriteValueARM64_OpARM64ORshiftRO(v)
case OpARM64REV:
return rewriteValueARM64_OpARM64REV(v)
case OpARM64REVW:
return rewriteValueARM64_OpARM64REVW(v)
case OpARM64ROR:
return rewriteValueARM64_OpARM64ROR(v)
case OpARM64RORW:
return rewriteValueARM64_OpARM64RORW(v)
case OpARM64SBCSflags:
return rewriteValueARM64_OpARM64SBCSflags(v)
case OpARM64SBFX:
return rewriteValueARM64_OpARM64SBFX(v)
case OpARM64SLL:
return rewriteValueARM64_OpARM64SLL(v)
case OpARM64SLLconst:
return rewriteValueARM64_OpARM64SLLconst(v)
case OpARM64SRA:
return rewriteValueARM64_OpARM64SRA(v)
case OpARM64SRAconst:
return rewriteValueARM64_OpARM64SRAconst(v)
case OpARM64SRL:
return rewriteValueARM64_OpARM64SRL(v)
case OpARM64SRLconst:
return rewriteValueARM64_OpARM64SRLconst(v)
case OpARM64STP:
return rewriteValueARM64_OpARM64STP(v)
case OpARM64SUB:
return rewriteValueARM64_OpARM64SUB(v)
case OpARM64SUBconst:
return rewriteValueARM64_OpARM64SUBconst(v)
case OpARM64SUBshiftLL:
return rewriteValueARM64_OpARM64SUBshiftLL(v)
case OpARM64SUBshiftRA:
return rewriteValueARM64_OpARM64SUBshiftRA(v)
case OpARM64SUBshiftRL:
return rewriteValueARM64_OpARM64SUBshiftRL(v)
case OpARM64TST:
return rewriteValueARM64_OpARM64TST(v)
case OpARM64TSTW:
return rewriteValueARM64_OpARM64TSTW(v)
case OpARM64TSTWconst:
return rewriteValueARM64_OpARM64TSTWconst(v)
case OpARM64TSTconst:
return rewriteValueARM64_OpARM64TSTconst(v)
case OpARM64TSTshiftLL:
return rewriteValueARM64_OpARM64TSTshiftLL(v)
case OpARM64TSTshiftRA:
return rewriteValueARM64_OpARM64TSTshiftRA(v)
case OpARM64TSTshiftRL:
return rewriteValueARM64_OpARM64TSTshiftRL(v)
case OpARM64TSTshiftRO:
return rewriteValueARM64_OpARM64TSTshiftRO(v)
case OpARM64UBFIZ:
return rewriteValueARM64_OpARM64UBFIZ(v)
case OpARM64UBFX:
return rewriteValueARM64_OpARM64UBFX(v)
case OpARM64UDIV:
return rewriteValueARM64_OpARM64UDIV(v)
case OpARM64UDIVW:
return rewriteValueARM64_OpARM64UDIVW(v)
case OpARM64UMOD:
return rewriteValueARM64_OpARM64UMOD(v)
case OpARM64UMODW:
return rewriteValueARM64_OpARM64UMODW(v)
case OpARM64XOR:
return rewriteValueARM64_OpARM64XOR(v)
case OpARM64XORconst:
return rewriteValueARM64_OpARM64XORconst(v)
case OpARM64XORshiftLL:
return rewriteValueARM64_OpARM64XORshiftLL(v)
case OpARM64XORshiftRA:
return rewriteValueARM64_OpARM64XORshiftRA(v)
case OpARM64XORshiftRL:
return rewriteValueARM64_OpARM64XORshiftRL(v)
case OpARM64XORshiftRO:
return rewriteValueARM64_OpARM64XORshiftRO(v)
case OpAbs:
v.Op = OpARM64FABSD
return true
case OpAdd16:
v.Op = OpARM64ADD
return true
case OpAdd32:
v.Op = OpARM64ADD
return true
case OpAdd32F:
v.Op = OpARM64FADDS
return true
case OpAdd64:
v.Op = OpARM64ADD
return true
case OpAdd64F:
v.Op = OpARM64FADDD
return true
case OpAdd8:
v.Op = OpARM64ADD
return true
case OpAddPtr:
v.Op = OpARM64ADD
return true
case OpAddr:
return rewriteValueARM64_OpAddr(v)
case OpAnd16:
v.Op = OpARM64AND
return true
case OpAnd32:
v.Op = OpARM64AND
return true
case OpAnd64:
v.Op = OpARM64AND
return true
case OpAnd8:
v.Op = OpARM64AND
return true
case OpAndB:
v.Op = OpARM64AND
return true
case OpAtomicAdd32:
v.Op = OpARM64LoweredAtomicAdd32
return true
case OpAtomicAdd32Variant:
v.Op = OpARM64LoweredAtomicAdd32Variant
return true
case OpAtomicAdd64:
v.Op = OpARM64LoweredAtomicAdd64
return true
case OpAtomicAdd64Variant:
v.Op = OpARM64LoweredAtomicAdd64Variant
return true
case OpAtomicAnd32value:
v.Op = OpARM64LoweredAtomicAnd32
return true
case OpAtomicAnd32valueVariant:
v.Op = OpARM64LoweredAtomicAnd32Variant
return true
case OpAtomicAnd64value:
v.Op = OpARM64LoweredAtomicAnd64
return true
case OpAtomicAnd64valueVariant:
v.Op = OpARM64LoweredAtomicAnd64Variant
return true
case OpAtomicAnd8value:
v.Op = OpARM64LoweredAtomicAnd8
return true
case OpAtomicAnd8valueVariant:
v.Op = OpARM64LoweredAtomicAnd8Variant
return true
case OpAtomicCompareAndSwap32:
v.Op = OpARM64LoweredAtomicCas32
return true
case OpAtomicCompareAndSwap32Variant:
v.Op = OpARM64LoweredAtomicCas32Variant
return true
case OpAtomicCompareAndSwap64:
v.Op = OpARM64LoweredAtomicCas64
return true
case OpAtomicCompareAndSwap64Variant:
v.Op = OpARM64LoweredAtomicCas64Variant
return true
case OpAtomicExchange32:
v.Op = OpARM64LoweredAtomicExchange32
return true
case OpAtomicExchange32Variant:
v.Op = OpARM64LoweredAtomicExchange32Variant
return true
case OpAtomicExchange64:
v.Op = OpARM64LoweredAtomicExchange64
return true
case OpAtomicExchange64Variant:
v.Op = OpARM64LoweredAtomicExchange64Variant
return true
case OpAtomicLoad32:
v.Op = OpARM64LDARW
return true
case OpAtomicLoad64:
v.Op = OpARM64LDAR
return true
case OpAtomicLoad8:
v.Op = OpARM64LDARB
return true
case OpAtomicLoadPtr:
v.Op = OpARM64LDAR
return true
case OpAtomicOr32value:
v.Op = OpARM64LoweredAtomicOr32
return true
case OpAtomicOr32valueVariant:
v.Op = OpARM64LoweredAtomicOr32Variant
return true
case OpAtomicOr64value:
v.Op = OpARM64LoweredAtomicOr64
return true
case OpAtomicOr64valueVariant:
v.Op = OpARM64LoweredAtomicOr64Variant
return true
case OpAtomicOr8value:
v.Op = OpARM64LoweredAtomicOr8
return true
case OpAtomicOr8valueVariant:
v.Op = OpARM64LoweredAtomicOr8Variant
return true
case OpAtomicStore32:
v.Op = OpARM64STLRW
return true
case OpAtomicStore64:
v.Op = OpARM64STLR
return true
case OpAtomicStore8:
v.Op = OpARM64STLRB
return true
case OpAtomicStorePtrNoWB:
v.Op = OpARM64STLR
return true
case OpAvg64u:
return rewriteValueARM64_OpAvg64u(v)
case OpBitLen32:
return rewriteValueARM64_OpBitLen32(v)
case OpBitLen64:
return rewriteValueARM64_OpBitLen64(v)
case OpBitRev16:
return rewriteValueARM64_OpBitRev16(v)
case OpBitRev32:
v.Op = OpARM64RBITW
return true
case OpBitRev64:
v.Op = OpARM64RBIT
return true
case OpBitRev8:
return rewriteValueARM64_OpBitRev8(v)
case OpBswap16:
v.Op = OpARM64REV16W
return true
case OpBswap32:
v.Op = OpARM64REVW
return true
case OpBswap64:
v.Op = OpARM64REV
return true
case OpCeil:
v.Op = OpARM64FRINTPD
return true
case OpClosureCall:
v.Op = OpARM64CALLclosure
return true
case OpCom16:
v.Op = OpARM64MVN
return true
case OpCom32:
v.Op = OpARM64MVN
return true
case OpCom64:
v.Op = OpARM64MVN
return true
case OpCom8:
v.Op = OpARM64MVN
return true
case OpCondSelect:
return rewriteValueARM64_OpCondSelect(v)
case OpConst16:
return rewriteValueARM64_OpConst16(v)
case OpConst32:
return rewriteValueARM64_OpConst32(v)
case OpConst32F:
return rewriteValueARM64_OpConst32F(v)
case OpConst64:
return rewriteValueARM64_OpConst64(v)
case OpConst64F:
return rewriteValueARM64_OpConst64F(v)
case OpConst8:
return rewriteValueARM64_OpConst8(v)
case OpConstBool:
return rewriteValueARM64_OpConstBool(v)
case OpConstNil:
return rewriteValueARM64_OpConstNil(v)
case OpCtz16:
return rewriteValueARM64_OpCtz16(v)
case OpCtz16NonZero:
v.Op = OpCtz32
return true
case OpCtz32:
return rewriteValueARM64_OpCtz32(v)
case OpCtz32NonZero:
v.Op = OpCtz32
return true
case OpCtz64:
return rewriteValueARM64_OpCtz64(v)
case OpCtz64NonZero:
v.Op = OpCtz64
return true
case OpCtz8:
return rewriteValueARM64_OpCtz8(v)
case OpCtz8NonZero:
v.Op = OpCtz32
return true
case OpCvt32Fto32:
v.Op = OpARM64FCVTZSSW
return true
case OpCvt32Fto32U:
v.Op = OpARM64FCVTZUSW
return true
case OpCvt32Fto64:
v.Op = OpARM64FCVTZSS
return true
case OpCvt32Fto64F:
v.Op = OpARM64FCVTSD
return true
case OpCvt32Fto64U:
v.Op = OpARM64FCVTZUS
return true
case OpCvt32Uto32F:
v.Op = OpARM64UCVTFWS
return true
case OpCvt32Uto64F:
v.Op = OpARM64UCVTFWD
return true
case OpCvt32to32F:
v.Op = OpARM64SCVTFWS
return true
case OpCvt32to64F:
v.Op = OpARM64SCVTFWD
return true
case OpCvt64Fto32:
v.Op = OpARM64FCVTZSDW
return true
case OpCvt64Fto32F:
v.Op = OpARM64FCVTDS
return true
case OpCvt64Fto32U:
v.Op = OpARM64FCVTZUDW
return true
case OpCvt64Fto64:
v.Op = OpARM64FCVTZSD
return true
case OpCvt64Fto64U:
v.Op = OpARM64FCVTZUD
return true
case OpCvt64Uto32F:
v.Op = OpARM64UCVTFS
return true
case OpCvt64Uto64F:
v.Op = OpARM64UCVTFD
return true
case OpCvt64to32F:
v.Op = OpARM64SCVTFS
return true
case OpCvt64to64F:
v.Op = OpARM64SCVTFD
return true
case OpCvtBoolToUint8:
v.Op = OpCopy
return true
case OpDiv16:
return rewriteValueARM64_OpDiv16(v)
case OpDiv16u:
return rewriteValueARM64_OpDiv16u(v)
case OpDiv32:
return rewriteValueARM64_OpDiv32(v)
case OpDiv32F:
v.Op = OpARM64FDIVS
return true
case OpDiv32u:
v.Op = OpARM64UDIVW
return true
case OpDiv64:
return rewriteValueARM64_OpDiv64(v)
case OpDiv64F:
v.Op = OpARM64FDIVD
return true
case OpDiv64u:
v.Op = OpARM64UDIV
return true
case OpDiv8:
return rewriteValueARM64_OpDiv8(v)
case OpDiv8u:
return rewriteValueARM64_OpDiv8u(v)
case OpEq16:
return rewriteValueARM64_OpEq16(v)
case OpEq32:
return rewriteValueARM64_OpEq32(v)
case OpEq32F:
return rewriteValueARM64_OpEq32F(v)
case OpEq64:
return rewriteValueARM64_OpEq64(v)
case OpEq64F:
return rewriteValueARM64_OpEq64F(v)
case OpEq8:
return rewriteValueARM64_OpEq8(v)
case OpEqB:
return rewriteValueARM64_OpEqB(v)
case OpEqPtr:
return rewriteValueARM64_OpEqPtr(v)
case OpFMA:
return rewriteValueARM64_OpFMA(v)
case OpFloor:
v.Op = OpARM64FRINTMD
return true
case OpGetCallerPC:
v.Op = OpARM64LoweredGetCallerPC
return true
case OpGetCallerSP:
v.Op = OpARM64LoweredGetCallerSP
return true
case OpGetClosurePtr:
v.Op = OpARM64LoweredGetClosurePtr
return true
case OpHmul32:
return rewriteValueARM64_OpHmul32(v)
case OpHmul32u:
return rewriteValueARM64_OpHmul32u(v)
case OpHmul64:
v.Op = OpARM64MULH
return true
case OpHmul64u:
v.Op = OpARM64UMULH
return true
case OpInterCall:
v.Op = OpARM64CALLinter
return true
case OpIsInBounds:
return rewriteValueARM64_OpIsInBounds(v)
case OpIsNonNil:
return rewriteValueARM64_OpIsNonNil(v)
case OpIsSliceInBounds:
return rewriteValueARM64_OpIsSliceInBounds(v)
case OpLeq16:
return rewriteValueARM64_OpLeq16(v)
case OpLeq16U:
return rewriteValueARM64_OpLeq16U(v)
case OpLeq32:
return rewriteValueARM64_OpLeq32(v)
case OpLeq32F:
return rewriteValueARM64_OpLeq32F(v)
case OpLeq32U:
return rewriteValueARM64_OpLeq32U(v)
case OpLeq64:
return rewriteValueARM64_OpLeq64(v)
case OpLeq64F:
return rewriteValueARM64_OpLeq64F(v)
case OpLeq64U:
return rewriteValueARM64_OpLeq64U(v)
case OpLeq8:
return rewriteValueARM64_OpLeq8(v)
case OpLeq8U:
return rewriteValueARM64_OpLeq8U(v)
case OpLess16:
return rewriteValueARM64_OpLess16(v)
case OpLess16U:
return rewriteValueARM64_OpLess16U(v)
case OpLess32:
return rewriteValueARM64_OpLess32(v)
case OpLess32F:
return rewriteValueARM64_OpLess32F(v)
case OpLess32U:
return rewriteValueARM64_OpLess32U(v)
case OpLess64:
return rewriteValueARM64_OpLess64(v)
case OpLess64F:
return rewriteValueARM64_OpLess64F(v)
case OpLess64U:
return rewriteValueARM64_OpLess64U(v)
case OpLess8:
return rewriteValueARM64_OpLess8(v)
case OpLess8U:
return rewriteValueARM64_OpLess8U(v)
case OpLoad:
return rewriteValueARM64_OpLoad(v)
case OpLocalAddr:
return rewriteValueARM64_OpLocalAddr(v)
case OpLsh16x16:
return rewriteValueARM64_OpLsh16x16(v)
case OpLsh16x32:
return rewriteValueARM64_OpLsh16x32(v)
case OpLsh16x64:
return rewriteValueARM64_OpLsh16x64(v)
case OpLsh16x8:
return rewriteValueARM64_OpLsh16x8(v)
case OpLsh32x16:
return rewriteValueARM64_OpLsh32x16(v)
case OpLsh32x32:
return rewriteValueARM64_OpLsh32x32(v)
case OpLsh32x64:
return rewriteValueARM64_OpLsh32x64(v)
case OpLsh32x8:
return rewriteValueARM64_OpLsh32x8(v)
case OpLsh64x16:
return rewriteValueARM64_OpLsh64x16(v)
case OpLsh64x32:
return rewriteValueARM64_OpLsh64x32(v)
case OpLsh64x64:
return rewriteValueARM64_OpLsh64x64(v)
case OpLsh64x8:
return rewriteValueARM64_OpLsh64x8(v)
case OpLsh8x16:
return rewriteValueARM64_OpLsh8x16(v)
case OpLsh8x32:
return rewriteValueARM64_OpLsh8x32(v)
case OpLsh8x64:
return rewriteValueARM64_OpLsh8x64(v)
case OpLsh8x8:
return rewriteValueARM64_OpLsh8x8(v)
case OpMax32F:
v.Op = OpARM64FMAXS
return true
case OpMax64F:
v.Op = OpARM64FMAXD
return true
case OpMin32F:
v.Op = OpARM64FMINS
return true
case OpMin64F:
v.Op = OpARM64FMIND
return true
case OpMod16:
return rewriteValueARM64_OpMod16(v)
case OpMod16u:
return rewriteValueARM64_OpMod16u(v)
case OpMod32:
return rewriteValueARM64_OpMod32(v)
case OpMod32u:
v.Op = OpARM64UMODW
return true
case OpMod64:
return rewriteValueARM64_OpMod64(v)
case OpMod64u:
v.Op = OpARM64UMOD
return true
case OpMod8:
return rewriteValueARM64_OpMod8(v)
case OpMod8u:
return rewriteValueARM64_OpMod8u(v)
case OpMove:
return rewriteValueARM64_OpMove(v)
case OpMul16:
v.Op = OpARM64MULW
return true
case OpMul32:
v.Op = OpARM64MULW
return true
case OpMul32F:
v.Op = OpARM64FMULS
return true
case OpMul64:
v.Op = OpARM64MUL
return true
case OpMul64F:
v.Op = OpARM64FMULD
return true
case OpMul8:
v.Op = OpARM64MULW
return true
case OpNeg16:
v.Op = OpARM64NEG
return true
case OpNeg32:
v.Op = OpARM64NEG
return true
case OpNeg32F:
v.Op = OpARM64FNEGS
return true
case OpNeg64:
v.Op = OpARM64NEG
return true
case OpNeg64F:
v.Op = OpARM64FNEGD
return true
case OpNeg8:
v.Op = OpARM64NEG
return true
case OpNeq16:
return rewriteValueARM64_OpNeq16(v)
case OpNeq32:
return rewriteValueARM64_OpNeq32(v)
case OpNeq32F:
return rewriteValueARM64_OpNeq32F(v)
case OpNeq64:
return rewriteValueARM64_OpNeq64(v)
case OpNeq64F:
return rewriteValueARM64_OpNeq64F(v)
case OpNeq8:
return rewriteValueARM64_OpNeq8(v)
case OpNeqB:
v.Op = OpARM64XOR
return true
case OpNeqPtr:
return rewriteValueARM64_OpNeqPtr(v)
case OpNilCheck:
v.Op = OpARM64LoweredNilCheck
return true
case OpNot:
return rewriteValueARM64_OpNot(v)
case OpOffPtr:
return rewriteValueARM64_OpOffPtr(v)
case OpOr16:
v.Op = OpARM64OR
return true
case OpOr32:
v.Op = OpARM64OR
return true
case OpOr64:
v.Op = OpARM64OR
return true
case OpOr8:
v.Op = OpARM64OR
return true
case OpOrB:
v.Op = OpARM64OR
return true
case OpPanicBounds:
return rewriteValueARM64_OpPanicBounds(v)
case OpPopCount16:
return rewriteValueARM64_OpPopCount16(v)
case OpPopCount32:
return rewriteValueARM64_OpPopCount32(v)
case OpPopCount64:
return rewriteValueARM64_OpPopCount64(v)
case OpPrefetchCache:
return rewriteValueARM64_OpPrefetchCache(v)
case OpPrefetchCacheStreamed:
return rewriteValueARM64_OpPrefetchCacheStreamed(v)
case OpPubBarrier:
return rewriteValueARM64_OpPubBarrier(v)
case OpRotateLeft16:
return rewriteValueARM64_OpRotateLeft16(v)
case OpRotateLeft32:
return rewriteValueARM64_OpRotateLeft32(v)
case OpRotateLeft64:
return rewriteValueARM64_OpRotateLeft64(v)
case OpRotateLeft8:
return rewriteValueARM64_OpRotateLeft8(v)
case OpRound:
v.Op = OpARM64FRINTAD
return true
case OpRound32F:
v.Op = OpARM64LoweredRound32F
return true
case OpRound64F:
v.Op = OpARM64LoweredRound64F
return true
case OpRoundToEven:
v.Op = OpARM64FRINTND
return true
case OpRsh16Ux16:
return rewriteValueARM64_OpRsh16Ux16(v)
case OpRsh16Ux32:
return rewriteValueARM64_OpRsh16Ux32(v)
case OpRsh16Ux64:
return rewriteValueARM64_OpRsh16Ux64(v)
case OpRsh16Ux8:
return rewriteValueARM64_OpRsh16Ux8(v)
case OpRsh16x16:
return rewriteValueARM64_OpRsh16x16(v)
case OpRsh16x32:
return rewriteValueARM64_OpRsh16x32(v)
case OpRsh16x64:
return rewriteValueARM64_OpRsh16x64(v)
case OpRsh16x8:
return rewriteValueARM64_OpRsh16x8(v)
case OpRsh32Ux16:
return rewriteValueARM64_OpRsh32Ux16(v)
case OpRsh32Ux32:
return rewriteValueARM64_OpRsh32Ux32(v)
case OpRsh32Ux64:
return rewriteValueARM64_OpRsh32Ux64(v)
case OpRsh32Ux8:
return rewriteValueARM64_OpRsh32Ux8(v)
case OpRsh32x16:
return rewriteValueARM64_OpRsh32x16(v)
case OpRsh32x32:
return rewriteValueARM64_OpRsh32x32(v)
case OpRsh32x64:
return rewriteValueARM64_OpRsh32x64(v)
case OpRsh32x8:
return rewriteValueARM64_OpRsh32x8(v)
case OpRsh64Ux16:
return rewriteValueARM64_OpRsh64Ux16(v)
case OpRsh64Ux32:
return rewriteValueARM64_OpRsh64Ux32(v)
case OpRsh64Ux64:
return rewriteValueARM64_OpRsh64Ux64(v)
case OpRsh64Ux8:
return rewriteValueARM64_OpRsh64Ux8(v)
case OpRsh64x16:
return rewriteValueARM64_OpRsh64x16(v)
case OpRsh64x32:
return rewriteValueARM64_OpRsh64x32(v)
case OpRsh64x64:
return rewriteValueARM64_OpRsh64x64(v)
case OpRsh64x8:
return rewriteValueARM64_OpRsh64x8(v)
case OpRsh8Ux16:
return rewriteValueARM64_OpRsh8Ux16(v)
case OpRsh8Ux32:
return rewriteValueARM64_OpRsh8Ux32(v)
case OpRsh8Ux64:
return rewriteValueARM64_OpRsh8Ux64(v)
case OpRsh8Ux8:
return rewriteValueARM64_OpRsh8Ux8(v)
case OpRsh8x16:
return rewriteValueARM64_OpRsh8x16(v)
case OpRsh8x32:
return rewriteValueARM64_OpRsh8x32(v)
case OpRsh8x64:
return rewriteValueARM64_OpRsh8x64(v)
case OpRsh8x8:
return rewriteValueARM64_OpRsh8x8(v)
case OpSelect0:
return rewriteValueARM64_OpSelect0(v)
case OpSelect1:
return rewriteValueARM64_OpSelect1(v)
case OpSelectN:
return rewriteValueARM64_OpSelectN(v)
case OpSignExt16to32:
v.Op = OpARM64MOVHreg
return true
case OpSignExt16to64:
v.Op = OpARM64MOVHreg
return true
case OpSignExt32to64:
v.Op = OpARM64MOVWreg
return true
case OpSignExt8to16:
v.Op = OpARM64MOVBreg
return true
case OpSignExt8to32:
v.Op = OpARM64MOVBreg
return true
case OpSignExt8to64:
v.Op = OpARM64MOVBreg
return true
case OpSlicemask:
return rewriteValueARM64_OpSlicemask(v)
case OpSqrt:
v.Op = OpARM64FSQRTD
return true
case OpSqrt32:
v.Op = OpARM64FSQRTS
return true
case OpStaticCall:
v.Op = OpARM64CALLstatic
return true
case OpStore:
return rewriteValueARM64_OpStore(v)
case OpSub16:
v.Op = OpARM64SUB
return true
case OpSub32:
v.Op = OpARM64SUB
return true
case OpSub32F:
v.Op = OpARM64FSUBS
return true
case OpSub64:
v.Op = OpARM64SUB
return true
case OpSub64F:
v.Op = OpARM64FSUBD
return true
case OpSub8:
v.Op = OpARM64SUB
return true
case OpSubPtr:
v.Op = OpARM64SUB
return true
case OpTailCall:
v.Op = OpARM64CALLtail
return true
case OpTrunc:
v.Op = OpARM64FRINTZD
return true
case OpTrunc16to8:
v.Op = OpCopy
return true
case OpTrunc32to16:
v.Op = OpCopy
return true
case OpTrunc32to8:
v.Op = OpCopy
return true
case OpTrunc64to16:
v.Op = OpCopy
return true
case OpTrunc64to32:
v.Op = OpCopy
return true
case OpTrunc64to8:
v.Op = OpCopy
return true
case OpWB:
v.Op = OpARM64LoweredWB
return true
case OpXor16:
v.Op = OpARM64XOR
return true
case OpXor32:
v.Op = OpARM64XOR
return true
case OpXor64:
v.Op = OpARM64XOR
return true
case OpXor8:
v.Op = OpARM64XOR
return true
case OpZero:
return rewriteValueARM64_OpZero(v)
case OpZeroExt16to32:
v.Op = OpARM64MOVHUreg
return true
case OpZeroExt16to64:
v.Op = OpARM64MOVHUreg
return true
case OpZeroExt32to64:
v.Op = OpARM64MOVWUreg
return true
case OpZeroExt8to16:
v.Op = OpARM64MOVBUreg
return true
case OpZeroExt8to32:
v.Op = OpARM64MOVBUreg
return true
case OpZeroExt8to64:
v.Op = OpARM64MOVBUreg
return true
}
return false
}
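// Each per-op function below applies the rewrite rules for a single ARM64 op.
// Rules are tried in the order they appear in _gen/ARM64.rules: the first rule
// whose match and condition succeed rewrites v in place and returns true;
// false means no rule applied.
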
func rewriteValueARM64_OpARM64ADCSflags(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c))))
// result: (ADCSflags x y c)
for {
x := v_0
y := v_1
if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
break
}
v_2_0 := v_2.Args[0]
if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
break
}
v_2_0_0 := v_2_0.Args[0]
if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 {
break
}
c := v_2_0_0.Args[0]
v.reset(OpARM64ADCSflags)
v.AddArg3(x, y, c)
return true
}
// match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0]))))
// result: (ADDSflags x y)
for {
x := v_0
y := v_1
if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
break
}
v_2_0 := v_2.Args[0]
if v_2_0.Op != OpARM64ADDSconstflags || auxIntToInt64(v_2_0.AuxInt) != -1 {
break
}
v_2_0_0 := v_2_0.Args[0]
if v_2_0_0.Op != OpARM64MOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
break
}
v.reset(OpARM64ADDSflags)
v.AddArg2(x, y)
return true
}
return false
}
func rewriteValueARM64_OpARM64ADD(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ADD x (MOVDconst <t> [c]))
// cond: !t.IsPtr()
// result: (ADDconst [c] x)
for {
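// ADD is commutative: the inner loop runs the match twice, swapping v_0 and
// v_1 on the second pass so a rule written for one argument order also
// matches the other.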
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
t := v_1.Type
c := auxIntToInt64(v_1.AuxInt)
if !(!t.IsPtr()) {
continue
}
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c)
v.AddArg(x)
return true
}
break
}
// match: (ADD a l:(MUL x y))
// cond: l.Uses==1 && clobber(l)
// result: (MADD a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MUL {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MADD)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MNEG x y))
// cond: l.Uses==1 && clobber(l)
// result: (MSUB a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MNEG {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MSUB)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MULW x y))
// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
// result: (MADDW a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MULW {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MADDW)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD a l:(MNEGW x y))
// cond: v.Type.Size() <= 4 && l.Uses==1 && clobber(l)
// result: (MSUBW a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
l := v_1
if l.Op != OpARM64MNEGW {
continue
}
y := l.Args[1]
x := l.Args[0]
if !(v.Type.Size() <= 4 && l.Uses == 1 && clobber(l)) {
continue
}
v.reset(OpARM64MSUBW)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (ADD x (NEG y))
// result: (SUB x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64NEG {
continue
}
y := v_1.Args[0]
v.reset(OpARM64SUB)
v.AddArg2(x, y)
return true
}
break
}
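// The next three rules fold a constant shift of one operand into ARM64's
// add-with-shifted-register forms (ADDshiftLL/RL/RA); clobberIfDead lets the
// shift value be discarded when this ADD was its only use.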
// match: (ADD x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftLL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (ADD x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftRL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (ADD x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (ADDshiftRA x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ADDshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64ADDSflags(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ADDSflags x (MOVDconst [c]))
// result: (ADDSconstflags [c] x)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ADDSconstflags)
v.AuxInt = int64ToAuxInt(c)
v.AddArg(x)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64ADDconst(v *Value) bool {
v_0 := v.Args[0]
// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
// cond: is32Bit(off1+int64(off2))
// result: (MOVDaddr [int32(off1)+off2] {sym} ptr)
for {
off1 := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDaddr {
break
}
off2 := auxIntToInt32(v_0.AuxInt)
sym := auxToSym(v_0.Aux)
ptr := v_0.Args[0]
if !(is32Bit(off1 + int64(off2))) {
break
}
v.reset(OpARM64MOVDaddr)
v.AuxInt = int32ToAuxInt(int32(off1) + off2)
v.Aux = symToAux(sym)
v.AddArg(ptr)
return true
}
// match: (ADDconst [c] y)
// cond: c < 0
// result: (SUBconst [-c] y)
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if !(c < 0) {
break
}
v.reset(OpARM64SUBconst)
v.AuxInt = int64ToAuxInt(-c)
v.AddArg(y)
return true
}
// match: (ADDconst [0] x)
// result: x
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
}
x := v_0
v.copyOf(x)
return true
}
// match: (ADDconst [c] (MOVDconst [d]))
// result: (MOVDconst [c+d])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
d := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(c + d)
return true
}
// match: (ADDconst [c] (ADDconst [d] x))
// result: (ADDconst [c+d] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64ADDconst {
break
}
d := auxIntToInt64(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c + d)
v.AddArg(x)
return true
}
// match: (ADDconst [c] (SUBconst [d] x))
// result: (ADDconst [c-d] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SUBconst {
break
}
d := auxIntToInt64(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c - d)
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
// match: (ADDshiftLL (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ADDshiftLL x (MOVDconst [c]) [d])
// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
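// The following rules recognize byte-swap idioms built from shifts and masks
// and replace them with the REV16W/REV16 instructions.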
// match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x)
// result: (REV16W x)
for {
if v.Type != typ.UInt16 || auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 8) {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64REV16W)
v.AddArg(x)
return true
}
// match: (ADDshiftLL [8] (UBFX [armBFAuxInt(8, 24)] (ANDconst [c1] x)) (ANDconst [c2] x))
// cond: uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff
// result: (REV16W x)
for {
if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64UBFX || auxIntToArm64BitField(v_0.AuxInt) != armBFAuxInt(8, 24) {
break
}
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpARM64ANDconst {
break
}
c1 := auxIntToInt64(v_0_0.AuxInt)
x := v_0_0.Args[0]
if v_1.Op != OpARM64ANDconst {
break
}
c2 := auxIntToInt64(v_1.AuxInt)
if x != v_1.Args[0] || !(uint32(c1) == 0xff00ff00 && uint32(c2) == 0x00ff00ff) {
break
}
v.reset(OpARM64REV16W)
v.AddArg(x)
return true
}
// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
// cond: (uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff)
// result: (REV16 x)
for {
if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
break
}
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpARM64ANDconst {
break
}
c1 := auxIntToInt64(v_0_0.AuxInt)
x := v_0_0.Args[0]
if v_1.Op != OpARM64ANDconst {
break
}
c2 := auxIntToInt64(v_1.AuxInt)
if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00ff00ff00 && uint64(c2) == 0x00ff00ff00ff00ff) {
break
}
v.reset(OpARM64REV16)
v.AddArg(x)
return true
}
// match: (ADDshiftLL [8] (SRLconst [8] (ANDconst [c1] x)) (ANDconst [c2] x))
// cond: (uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff)
// result: (REV16 (ANDconst <x.Type> [0xffffffff] x))
for {
if auxIntToInt64(v.AuxInt) != 8 || v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 8 {
break
}
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpARM64ANDconst {
break
}
c1 := auxIntToInt64(v_0_0.AuxInt)
x := v_0_0.Args[0]
if v_1.Op != OpARM64ANDconst {
break
}
c2 := auxIntToInt64(v_1.AuxInt)
if x != v_1.Args[0] || !(uint64(c1) == 0xff00ff00 && uint64(c2) == 0x00ff00ff) {
break
}
v.reset(OpARM64REV16)
v0 := b.NewValue0(v.Pos, OpARM64ANDconst, x.Type)
v0.AuxInt = int64ToAuxInt(0xffffffff)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
// result: (EXTRconst [64-c] x2 x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != 64-c {
break
}
x := v_0.Args[0]
x2 := v_1
v.reset(OpARM64EXTRconst)
v.AuxInt = int64ToAuxInt(64 - c)
v.AddArg2(x2, x)
return true
}
// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
// cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)
// result: (EXTRWconst [32-c] x2 x)
for {
t := v.Type
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64UBFX {
break
}
bfc := auxIntToArm64BitField(v_0.AuxInt)
x := v_0.Args[0]
x2 := v_1
if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) {
break
}
v.reset(OpARM64EXTRWconst)
v.AuxInt = int64ToAuxInt(32 - c)
v.AddArg2(x2, x)
return true
}
return false
}
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ADDshiftRA (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ADDshiftRA x (MOVDconst [c]) [d])
// result: (ADDconst x [c>>uint64(d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ADDshiftRL (MOVDconst [c]) x [d])
// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ADDshiftRL x (MOVDconst [c]) [d])
// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64AND(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (AND x (MOVDconst [c]))
// result: (ANDconst [c] x)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c)
v.AddArg(x)
return true
}
break
}
// match: (AND x x)
// result: x
for {
x := v_0
if x != v_1 {
break
}
v.copyOf(x)
return true
}
// match: (AND x (MVN y))
// result: (BIC x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MVN {
continue
}
y := v_1.Args[0]
v.reset(OpARM64BIC)
v.AddArg2(x, y)
return true
}
break
}
// match: (AND x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ANDshiftLL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ANDshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (AND x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (ANDshiftRL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ANDshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (AND x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (ANDshiftRA x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ANDshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (AND x0 x1:(RORconst [c] y))
// cond: clobberIfDead(x1)
// result: (ANDshiftRO x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64RORconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64ANDshiftRO)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64ANDconst(v *Value) bool {
v_0 := v.Args[0]
// match: (ANDconst [0] _)
// result: (MOVDconst [0])
for {
if auxIntToInt64(v.AuxInt) != 0 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (ANDconst [-1] x)
// result: x
for {
if auxIntToInt64(v.AuxInt) != -1 {
break
}
x := v_0
v.copyOf(x)
return true
}
// match: (ANDconst [c] (MOVDconst [d]))
// result: (MOVDconst [c&d])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
d := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(c & d)
return true
}
// match: (ANDconst [c] (ANDconst [d] x))
// result: (ANDconst [c&d] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64ANDconst {
break
}
d := auxIntToInt64(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c & d)
v.AddArg(x)
return true
}
// match: (ANDconst [c] (MOVWUreg x))
// result: (ANDconst [c&(1<<32-1)] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVWUreg {
break
}
x := v_0.Args[0]
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c & (1<<32 - 1))
v.AddArg(x)
return true
}
// match: (ANDconst [c] (MOVHUreg x))
// result: (ANDconst [c&(1<<16-1)] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVHUreg {
break
}
x := v_0.Args[0]
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c & (1<<16 - 1))
v.AddArg(x)
return true
}
// match: (ANDconst [c] (MOVBUreg x))
// result: (ANDconst [c&(1<<8-1)] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVBUreg {
break
}
x := v_0.Args[0]
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c & (1<<8 - 1))
v.AddArg(x)
return true
}
// match: (ANDconst [ac] (SLLconst [sc] x))
// cond: isARM64BFMask(sc, ac, sc)
// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
for {
ac := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SLLconst {
break
}
sc := auxIntToInt64(v_0.AuxInt)
x := v_0.Args[0]
if !(isARM64BFMask(sc, ac, sc)) {
break
}
v.reset(OpARM64UBFIZ)
v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, sc)))
v.AddArg(x)
return true
}
// match: (ANDconst [ac] (SRLconst [sc] x))
// cond: isARM64BFMask(sc, ac, 0)
// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
for {
ac := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRLconst {
break
}
sc := auxIntToInt64(v_0.AuxInt)
x := v_0.Args[0]
if !(isARM64BFMask(sc, ac, 0)) {
break
}
v.reset(OpARM64UBFX)
v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(sc, arm64BFWidth(ac, 0)))
v.AddArg(x)
return true
}
// match: (ANDconst [c] (UBFX [bfc] x))
// cond: isARM64BFMask(0, c, 0)
// result: (UBFX [armBFAuxInt(bfc.lsb(), min(bfc.width(), arm64BFWidth(c, 0)))] x)
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64UBFX {
break
}
bfc := auxIntToArm64BitField(v_0.AuxInt)
x := v_0.Args[0]
if !(isARM64BFMask(0, c, 0)) {
break
}
v.reset(OpARM64UBFX)
v.AuxInt = arm64BitFieldToAuxInt(armBFAuxInt(bfc.lsb(), min(bfc.width(), arm64BFWidth(c, 0))))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64ANDshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ANDshiftLL (MOVDconst [c]) x [d])
// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ANDshiftLL x (MOVDconst [c]) [d])
// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
// match: (ANDshiftLL y:(SLLconst x [c]) x [c])
// result: y
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if y.Op != OpARM64SLLconst || auxIntToInt64(y.AuxInt) != c {
break
}
x := y.Args[0]
if x != v_1 {
break
}
v.copyOf(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64ANDshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ANDshiftRA (MOVDconst [c]) x [d])
// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ANDshiftRA x (MOVDconst [c]) [d])
// result: (ANDconst x [c>>uint64(d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
// match: (ANDshiftRA y:(SRAconst x [c]) x [c])
// result: y
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if y.Op != OpARM64SRAconst || auxIntToInt64(y.AuxInt) != c {
break
}
x := y.Args[0]
if x != v_1 {
break
}
v.copyOf(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64ANDshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ANDshiftRL (MOVDconst [c]) x [d])
// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ANDshiftRL x (MOVDconst [c]) [d])
// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
// match: (ANDshiftRL y:(SRLconst x [c]) x [c])
// result: y
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if y.Op != OpARM64SRLconst || auxIntToInt64(y.AuxInt) != c {
break
}
x := y.Args[0]
if x != v_1 {
break
}
v.copyOf(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64ANDshiftRO(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (ANDshiftRO (MOVDconst [c]) x [d])
// result: (ANDconst [c] (RORconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64RORconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (ANDshiftRO x (MOVDconst [c]) [d])
// result: (ANDconst x [rotateRight64(c, d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(rotateRight64(c, d))
v.AddArg(x)
return true
}
// match: (ANDshiftRO y:(RORconst x [c]) x [c])
// result: y
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if y.Op != OpARM64RORconst || auxIntToInt64(y.AuxInt) != c {
break
}
x := y.Args[0]
if x != v_1 {
break
}
v.copyOf(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64BIC(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BIC x (MOVDconst [c]))
// result: (ANDconst [^c] x)
for {
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(^c)
v.AddArg(x)
return true
}
// match: (BIC x x)
// result: (MOVDconst [0])
for {
x := v_0
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (BIC x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (BICshiftLL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64BICshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (BIC x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (BICshiftRL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64BICshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (BIC x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (BICshiftRA x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64BICshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (BIC x0 x1:(RORconst [c] y))
// cond: clobberIfDead(x1)
// result: (BICshiftRO x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64RORconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64BICshiftRO)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
return false
}
func rewriteValueARM64_OpARM64BICshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftLL x (MOVDconst [c]) [d])
// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
// match: (BICshiftLL (SLLconst x [c]) x [c])
// result: (MOVDconst [0])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
return false
}
func rewriteValueARM64_OpARM64BICshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftRA x (MOVDconst [c]) [d])
// result: (ANDconst x [^(c>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
v.AddArg(x)
return true
}
// match: (BICshiftRA (SRAconst x [c]) x [c])
// result: (MOVDconst [0])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
return false
}
func rewriteValueARM64_OpARM64BICshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftRL x (MOVDconst [c]) [d])
// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
// match: (BICshiftRL (SRLconst x [c]) x [c])
// result: (MOVDconst [0])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
return false
}
func rewriteValueARM64_OpARM64BICshiftRO(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftRO x (MOVDconst [c]) [d])
// result: (ANDconst x [^rotateRight64(c, d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64ANDconst)
v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
v.AddArg(x)
return true
}
// match: (BICshiftRO (RORconst x [c]) x [c])
// result: (MOVDconst [0])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMN(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CMN x (MOVDconst [c]))
// result: (CMNconst [c] x)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(c)
v.AddArg(x)
return true
}
break
}
// match: (CMN x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMNshiftLL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64CMNshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (CMN x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMNshiftRL x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64CMNshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
// match: (CMN x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMNshiftRA x0 y [c])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
continue
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
continue
}
v.reset(OpARM64CMNshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64CMNW(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CMNW x (MOVDconst [c]))
// result: (CMNWconst [int32(c)] x)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
x := v_0
if v_1.Op != OpARM64MOVDconst {
continue
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMNWconst)
v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64CMNWconst(v *Value) bool {
v_0 := v.Args[0]
// match: (CMNWconst [c] y)
// cond: c < 0 && c != -1<<31
// result: (CMPWconst [-c] y)
for {
c := auxIntToInt32(v.AuxInt)
y := v_0
if !(c < 0 && c != -1<<31) {
break
}
v.reset(OpARM64CMPWconst)
v.AuxInt = int32ToAuxInt(-c)
v.AddArg(y)
return true
}
// match: (CMNWconst (MOVDconst [x]) [y])
// result: (FlagConstant [addFlags32(int32(x),y)])
for {
y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
x := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(addFlags32(int32(x), y))
return true
}
return false
}
func rewriteValueARM64_OpARM64CMNconst(v *Value) bool {
v_0 := v.Args[0]
// match: (CMNconst [c] y)
// cond: c < 0 && c != -1<<63
// result: (CMPconst [-c] y)
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if !(c < 0 && c != -1<<63) {
break
}
v.reset(OpARM64CMPconst)
v.AuxInt = int64ToAuxInt(-c)
v.AddArg(y)
return true
}
// match: (CMNconst (MOVDconst [x]) [y])
// result: (FlagConstant [addFlags64(x,y)])
for {
y := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
x := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(addFlags64(x, y))
return true
}
return false
}
func rewriteValueARM64_OpARM64CMNshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMNshiftLL (MOVDconst [c]) x [d])
// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (CMNshiftLL x (MOVDconst [c]) [d])
// result: (CMNconst x [int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMNshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMNshiftRA (MOVDconst [c]) x [d])
// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (CMNshiftRA x (MOVDconst [c]) [d])
// result: (CMNconst x [c>>uint64(d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMNshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMNshiftRL (MOVDconst [c]) x [d])
// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(c)
v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
v0.AuxInt = int64ToAuxInt(d)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (CMNshiftRL x (MOVDconst [c]) [d])
// result: (CMNconst x [int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMP(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMP x (MOVDconst [c]))
// result: (CMPconst [c] x)
for {
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMPconst)
v.AuxInt = int64ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMP (MOVDconst [c]) x)
// result: (InvertFlags (CMPconst [c] x))
for {
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (CMP x y)
// cond: canonLessThan(x,y)
// result: (InvertFlags (CMP y x))
for {
x := v_0
y := v_1
if !(canonLessThan(x, y)) {
break
}
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
v0.AddArg2(y, x)
v.AddArg(v0)
return true
}
// match: (CMP x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMPshiftLL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64CMPshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (CMP x0:(SLLconst [c] y) x1)
// cond: clobberIfDead(x0)
// result: (InvertFlags (CMPshiftLL x1 y [c]))
for {
x0 := v_0
if x0.Op != OpARM64SLLconst {
break
}
c := auxIntToInt64(x0.AuxInt)
y := x0.Args[0]
x1 := v_1
if !(clobberIfDead(x0)) {
break
}
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg2(x1, y)
v.AddArg(v0)
return true
}
// match: (CMP x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMPshiftRL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64CMPshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (CMP x0:(SRLconst [c] y) x1)
// cond: clobberIfDead(x0)
// result: (InvertFlags (CMPshiftRL x1 y [c]))
for {
x0 := v_0
if x0.Op != OpARM64SRLconst {
break
}
c := auxIntToInt64(x0.AuxInt)
y := x0.Args[0]
x1 := v_1
if !(clobberIfDead(x0)) {
break
}
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg2(x1, y)
v.AddArg(v0)
return true
}
// match: (CMP x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (CMPshiftRA x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64CMPshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (CMP x0:(SRAconst [c] y) x1)
// cond: clobberIfDead(x0)
// result: (InvertFlags (CMPshiftRA x1 y [c]))
for {
x0 := v_0
if x0.Op != OpARM64SRAconst {
break
}
c := auxIntToInt64(x0.AuxInt)
y := x0.Args[0]
x1 := v_1
if !(clobberIfDead(x0)) {
break
}
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg2(x1, y)
v.AddArg(v0)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMPW(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMPW x (MOVDconst [c]))
// result: (CMPWconst [int32(c)] x)
for {
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMPWconst)
v.AuxInt = int32ToAuxInt(int32(c))
v.AddArg(x)
return true
}
// match: (CMPW (MOVDconst [c]) x)
// result: (InvertFlags (CMPWconst [int32(c)] x))
for {
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
v0.AuxInt = int32ToAuxInt(int32(c))
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (CMPW x y)
// cond: canonLessThan(x,y)
// result: (InvertFlags (CMPW y x))
for {
x := v_0
y := v_1
if !(canonLessThan(x, y)) {
break
}
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
v0.AddArg2(y, x)
v.AddArg(v0)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMPWconst(v *Value) bool {
v_0 := v.Args[0]
// match: (CMPWconst [c] y)
// cond: c < 0 && c != -1<<31
// result: (CMNWconst [-c] y)
for {
c := auxIntToInt32(v.AuxInt)
y := v_0
if !(c < 0 && c != -1<<31) {
break
}
v.reset(OpARM64CMNWconst)
v.AuxInt = int32ToAuxInt(-c)
v.AddArg(y)
return true
}
// match: (CMPWconst (MOVDconst [x]) [y])
// result: (FlagConstant [subFlags32(int32(x),y)])
for {
y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
x := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags32(int32(x), y))
return true
}
// match: (CMPWconst (MOVBUreg _) [c])
// cond: 0xff < c
// result: (FlagConstant [subFlags64(0,1)])
for {
c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
// match: (CMPWconst (MOVHUreg _) [c])
// cond: 0xffff < c
// result: (FlagConstant [subFlags64(0,1)])
for {
c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
return false
}
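// The CMPconst rules below normalize negative constants to CMNconst, fold a
// comparison of two constants into a FlagConstant, and recognize comparisons
// whose outcome is statically known (for example, a zero-extended byte
// compared against a constant larger than 0xff), which also reduce to a
// FlagConstant.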
func rewriteValueARM64_OpARM64CMPconst(v *Value) bool {
v_0 := v.Args[0]
// match: (CMPconst [c] y)
// cond: c < 0 && c != -1<<63
// result: (CMNconst [-c] y)
for {
c := auxIntToInt64(v.AuxInt)
y := v_0
if !(c < 0 && c != -1<<63) {
break
}
v.reset(OpARM64CMNconst)
v.AuxInt = int64ToAuxInt(-c)
v.AddArg(y)
return true
}
// match: (CMPconst (MOVDconst [x]) [y])
// result: (FlagConstant [subFlags64(x,y)])
for {
y := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
x := auxIntToInt64(v_0.AuxInt)
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(x, y))
return true
}
// match: (CMPconst (MOVBUreg _) [c])
// cond: 0xff < c
// result: (FlagConstant [subFlags64(0,1)])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVBUreg || !(0xff < c) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
// match: (CMPconst (MOVHUreg _) [c])
// cond: 0xffff < c
// result: (FlagConstant [subFlags64(0,1)])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVHUreg || !(0xffff < c) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
// match: (CMPconst (MOVWUreg _) [c])
// cond: 0xffffffff < c
// result: (FlagConstant [subFlags64(0,1)])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
// match: (CMPconst (ANDconst _ [m]) [n])
// cond: 0 <= m && m < n
// result: (FlagConstant [subFlags64(0,1)])
for {
n := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64ANDconst {
break
}
m := auxIntToInt64(v_0.AuxInt)
if !(0 <= m && m < n) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
// match: (CMPconst (SRLconst _ [c]) [n])
// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
// result: (FlagConstant [subFlags64(0,1)])
for {
n := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRLconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
break
}
v.reset(OpARM64FlagConstant)
v.AuxInt = flagConstantToAuxInt(subFlags64(0, 1))
return true
}
return false
}
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMPshiftLL (MOVDconst [c]) x [d])
// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
v1.AuxInt = int64ToAuxInt(d)
v1.AddArg(x)
v0.AddArg(v1)
v.AddArg(v0)
return true
}
// match: (CMPshiftLL x (MOVDconst [c]) [d])
// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMPconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMPshiftRA (MOVDconst [c]) x [d])
// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
v1.AuxInt = int64ToAuxInt(d)
v1.AddArg(x)
v0.AddArg(v1)
v.AddArg(v0)
return true
}
// match: (CMPshiftRA x (MOVDconst [c]) [d])
// result: (CMPconst x [c>>uint64(d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMPconst)
v.AuxInt = int64ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
// match: (CMPshiftRL (MOVDconst [c]) x [d])
// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
for {
d := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
x := v_1
v.reset(OpARM64InvertFlags)
v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
v1.AuxInt = int64ToAuxInt(d)
v1.AddArg(x)
v0.AddArg(v1)
v.AddArg(v0)
return true
}
// match: (CMPshiftRL x (MOVDconst [c]) [d])
// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64CMPconst)
v.AuxInt = int64ToAuxInt(int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
return false
}
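// The CSEL rules canonicalize conditional selects into the more specific
// CSETM, CSEL0, CSINC, CSINV, and CSNEG forms, invert the condition code when
// the flags come from InvertFlags, and collapse the select entirely when
// ccARM64Eval can decide the condition against a known flag value.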
func rewriteValueARM64_OpARM64CSEL(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CSEL [cc] (MOVDconst [-1]) (MOVDconst [0]) flag)
// result: (CSETM [cc] flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != -1 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
break
}
flag := v_2
v.reset(OpARM64CSETM)
v.AuxInt = opToAuxInt(cc)
v.AddArg(flag)
return true
}
// match: (CSEL [cc] (MOVDconst [0]) (MOVDconst [-1]) flag)
// result: (CSETM [arm64Negate(cc)] flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 || v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != -1 {
break
}
flag := v_2
v.reset(OpARM64CSETM)
v.AuxInt = opToAuxInt(arm64Negate(cc))
v.AddArg(flag)
return true
}
// match: (CSEL [cc] x (MOVDconst [0]) flag)
// result: (CSEL0 [cc] x flag)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
break
}
flag := v_2
v.reset(OpARM64CSEL0)
v.AuxInt = opToAuxInt(cc)
v.AddArg2(x, flag)
return true
}
// match: (CSEL [cc] (MOVDconst [0]) y flag)
// result: (CSEL0 [arm64Negate(cc)] y flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
y := v_1
flag := v_2
v.reset(OpARM64CSEL0)
v.AuxInt = opToAuxInt(arm64Negate(cc))
v.AddArg2(y, flag)
return true
}
// match: (CSEL [cc] x (ADDconst [1] a) flag)
// result: (CSINC [cc] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64ADDconst || auxIntToInt64(v_1.AuxInt) != 1 {
break
}
a := v_1.Args[0]
flag := v_2
v.reset(OpARM64CSINC)
v.AuxInt = opToAuxInt(cc)
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] (ADDconst [1] a) x flag)
// result: (CSINC [arm64Negate(cc)] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64ADDconst || auxIntToInt64(v_0.AuxInt) != 1 {
break
}
a := v_0.Args[0]
x := v_1
flag := v_2
v.reset(OpARM64CSINC)
v.AuxInt = opToAuxInt(arm64Negate(cc))
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] x (MVN a) flag)
// result: (CSINV [cc] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MVN {
break
}
a := v_1.Args[0]
flag := v_2
v.reset(OpARM64CSINV)
v.AuxInt = opToAuxInt(cc)
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] (MVN a) x flag)
// result: (CSINV [arm64Negate(cc)] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64MVN {
break
}
a := v_0.Args[0]
x := v_1
flag := v_2
v.reset(OpARM64CSINV)
v.AuxInt = opToAuxInt(arm64Negate(cc))
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] x (NEG a) flag)
// result: (CSNEG [cc] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64NEG {
break
}
a := v_1.Args[0]
flag := v_2
v.reset(OpARM64CSNEG)
v.AuxInt = opToAuxInt(cc)
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] (NEG a) x flag)
// result: (CSNEG [arm64Negate(cc)] x a flag)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64NEG {
break
}
a := v_0.Args[0]
x := v_1
flag := v_2
v.reset(OpARM64CSNEG)
v.AuxInt = opToAuxInt(arm64Negate(cc))
v.AddArg3(x, a, flag)
return true
}
// match: (CSEL [cc] x y (InvertFlags cmp))
// result: (CSEL [arm64Invert(cc)] x y cmp)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64InvertFlags {
break
}
cmp := v_2.Args[0]
v.reset(OpARM64CSEL)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg3(x, y, cmp)
return true
}
// match: (CSEL [cc] x _ flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
flag := v_2
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.copyOf(x)
return true
}
// match: (CSEL [cc] _ y flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: y
for {
cc := auxIntToOp(v.AuxInt)
y := v_1
flag := v_2
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.copyOf(y)
return true
}
// match: (CSEL [cc] x y (CMPWconst [0] boolval))
// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
// result: (CSEL [boolval.Op] x y flagArg(boolval))
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
break
}
boolval := v_2.Args[0]
if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
break
}
v.reset(OpARM64CSEL)
v.AuxInt = opToAuxInt(boolval.Op)
v.AddArg3(x, y, flagArg(boolval))
return true
}
// match: (CSEL [cc] x y (CMPWconst [0] boolval))
// cond: cc == OpARM64Equal && flagArg(boolval) != nil
// result: (CSEL [arm64Negate(boolval.Op)] x y flagArg(boolval))
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64CMPWconst || auxIntToInt32(v_2.AuxInt) != 0 {
break
}
boolval := v_2.Args[0]
if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
break
}
v.reset(OpARM64CSEL)
v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
v.AddArg3(x, y, flagArg(boolval))
return true
}
return false
}
func rewriteValueARM64_OpARM64CSEL0(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CSEL0 [cc] x (InvertFlags cmp))
// result: (CSEL0 [arm64Invert(cc)] x cmp)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64InvertFlags {
break
}
cmp := v_1.Args[0]
v.reset(OpARM64CSEL0)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg2(x, cmp)
return true
}
// match: (CSEL0 [cc] x flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
flag := v_1
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.copyOf(x)
return true
}
// match: (CSEL0 [cc] _ flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: (MOVDconst [0])
for {
cc := auxIntToOp(v.AuxInt)
flag := v_1
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
// cond: cc == OpARM64NotEqual && flagArg(boolval) != nil
// result: (CSEL0 [boolval.Op] x flagArg(boolval))
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
break
}
boolval := v_1.Args[0]
if !(cc == OpARM64NotEqual && flagArg(boolval) != nil) {
break
}
v.reset(OpARM64CSEL0)
v.AuxInt = opToAuxInt(boolval.Op)
v.AddArg2(x, flagArg(boolval))
return true
}
// match: (CSEL0 [cc] x (CMPWconst [0] boolval))
// cond: cc == OpARM64Equal && flagArg(boolval) != nil
// result: (CSEL0 [arm64Negate(boolval.Op)] x flagArg(boolval))
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
if v_1.Op != OpARM64CMPWconst || auxIntToInt32(v_1.AuxInt) != 0 {
break
}
boolval := v_1.Args[0]
if !(cc == OpARM64Equal && flagArg(boolval) != nil) {
break
}
v.reset(OpARM64CSEL0)
v.AuxInt = opToAuxInt(arm64Negate(boolval.Op))
v.AddArg2(x, flagArg(boolval))
return true
}
return false
}
func rewriteValueARM64_OpARM64CSETM(v *Value) bool {
v_0 := v.Args[0]
// match: (CSETM [cc] (InvertFlags cmp))
// result: (CSETM [arm64Invert(cc)] cmp)
for {
cc := auxIntToOp(v.AuxInt)
if v_0.Op != OpARM64InvertFlags {
break
}
cmp := v_0.Args[0]
v.reset(OpARM64CSETM)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg(cmp)
return true
}
// match: (CSETM [cc] flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: (MOVDconst [-1])
for {
cc := auxIntToOp(v.AuxInt)
flag := v_0
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
// match: (CSETM [cc] flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: (MOVDconst [0])
for {
cc := auxIntToOp(v.AuxInt)
flag := v_0
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(0)
return true
}
return false
}
func rewriteValueARM64_OpARM64CSINC(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CSINC [cc] x y (InvertFlags cmp))
// result: (CSINC [arm64Invert(cc)] x y cmp)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64InvertFlags {
break
}
cmp := v_2.Args[0]
v.reset(OpARM64CSINC)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg3(x, y, cmp)
return true
}
// match: (CSINC [cc] x _ flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
flag := v_2
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.copyOf(x)
return true
}
// match: (CSINC [cc] _ y flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: (ADDconst [1] y)
for {
cc := auxIntToOp(v.AuxInt)
y := v_1
flag := v_2
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.reset(OpARM64ADDconst)
v.AuxInt = int64ToAuxInt(1)
v.AddArg(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64CSINV(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CSINV [cc] x y (InvertFlags cmp))
// result: (CSINV [arm64Invert(cc)] x y cmp)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64InvertFlags {
break
}
cmp := v_2.Args[0]
v.reset(OpARM64CSINV)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg3(x, y, cmp)
return true
}
// match: (CSINV [cc] x _ flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
flag := v_2
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.copyOf(x)
return true
}
// match: (CSINV [cc] _ y flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: (Not y)
for {
cc := auxIntToOp(v.AuxInt)
y := v_1
flag := v_2
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.reset(OpNot)
v.AddArg(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64CSNEG(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CSNEG [cc] x y (InvertFlags cmp))
// result: (CSNEG [arm64Invert(cc)] x y cmp)
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
y := v_1
if v_2.Op != OpARM64InvertFlags {
break
}
cmp := v_2.Args[0]
v.reset(OpARM64CSNEG)
v.AuxInt = opToAuxInt(arm64Invert(cc))
v.AddArg3(x, y, cmp)
return true
}
// match: (CSNEG [cc] x _ flag)
// cond: ccARM64Eval(cc, flag) > 0
// result: x
for {
cc := auxIntToOp(v.AuxInt)
x := v_0
flag := v_2
if !(ccARM64Eval(cc, flag) > 0) {
break
}
v.copyOf(x)
return true
}
// match: (CSNEG [cc] _ y flag)
// cond: ccARM64Eval(cc, flag) < 0
// result: (NEG y)
for {
cc := auxIntToOp(v.AuxInt)
y := v_1
flag := v_2
if !(ccARM64Eval(cc, flag) < 0) {
break
}
v.reset(OpARM64NEG)
v.AddArg(y)
return true
}
return false
}
func rewriteValueARM64_OpARM64DIV(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
// cond: d != 0
// result: (MOVDconst [c/d])
for {
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
if v_1.Op != OpARM64MOVDconst {
break
}
d := auxIntToInt64(v_1.AuxInt)
if !(d != 0) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(c / d)
return true
}
return false
}
func rewriteValueARM64_OpARM64DIVW(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
// cond: d != 0
// result: (MOVDconst [int64(uint32(int32(c)/int32(d)))])
for {
if v_0.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_0.AuxInt)
if v_1.Op != OpARM64MOVDconst {
break
}
d := auxIntToInt64(v_1.AuxInt)
if !(d != 0) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(int64(uint32(int32(c) / int32(d))))
return true
}
return false
}
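// EON computes x ^ ^y (exclusive OR NOT). A constant operand folds into an
// XORconst of the complemented constant, EON of a value with itself is all
// ones (-1), and a shifted or rotated operand folds into the EONshift* forms.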
func rewriteValueARM64_OpARM64EON(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (EON x (MOVDconst [c]))
// result: (XORconst [^c] x)
for {
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64XORconst)
v.AuxInt = int64ToAuxInt(^c)
v.AddArg(x)
return true
}
// match: (EON x x)
// result: (MOVDconst [-1])
for {
x := v_0
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
// match: (EON x0 x1:(SLLconst [c] y))
// cond: clobberIfDead(x1)
// result: (EONshiftLL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SLLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64EONshiftLL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (EON x0 x1:(SRLconst [c] y))
// cond: clobberIfDead(x1)
// result: (EONshiftRL x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRLconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64EONshiftRL)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (EON x0 x1:(SRAconst [c] y))
// cond: clobberIfDead(x1)
// result: (EONshiftRA x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64SRAconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64EONshiftRA)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
// match: (EON x0 x1:(RORconst [c] y))
// cond: clobberIfDead(x1)
// result: (EONshiftRO x0 y [c])
for {
x0 := v_0
x1 := v_1
if x1.Op != OpARM64RORconst {
break
}
c := auxIntToInt64(x1.AuxInt)
y := x1.Args[0]
if !(clobberIfDead(x1)) {
break
}
v.reset(OpARM64EONshiftRO)
v.AuxInt = int64ToAuxInt(c)
v.AddArg2(x0, y)
return true
}
return false
}
func rewriteValueARM64_OpARM64EONshiftLL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (EONshiftLL x (MOVDconst [c]) [d])
// result: (XORconst x [^int64(uint64(c)<<uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64XORconst)
v.AuxInt = int64ToAuxInt(^int64(uint64(c) << uint64(d)))
v.AddArg(x)
return true
}
// match: (EONshiftLL (SLLconst x [c]) x [c])
// result: (MOVDconst [-1])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SLLconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
return false
}
func rewriteValueARM64_OpARM64EONshiftRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (EONshiftRA x (MOVDconst [c]) [d])
// result: (XORconst x [^(c>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64XORconst)
v.AuxInt = int64ToAuxInt(^(c >> uint64(d)))
v.AddArg(x)
return true
}
// match: (EONshiftRA (SRAconst x [c]) x [c])
// result: (MOVDconst [-1])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRAconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
return false
}
func rewriteValueARM64_OpARM64EONshiftRL(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (EONshiftRL x (MOVDconst [c]) [d])
// result: (XORconst x [^int64(uint64(c)>>uint64(d))])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64XORconst)
v.AuxInt = int64ToAuxInt(^int64(uint64(c) >> uint64(d)))
v.AddArg(x)
return true
}
// match: (EONshiftRL (SRLconst x [c]) x [c])
// result: (MOVDconst [-1])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64SRLconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
return false
}
func rewriteValueARM64_OpARM64EONshiftRO(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (EONshiftRO x (MOVDconst [c]) [d])
// result: (XORconst x [^rotateRight64(c, d)])
for {
d := auxIntToInt64(v.AuxInt)
x := v_0
if v_1.Op != OpARM64MOVDconst {
break
}
c := auxIntToInt64(v_1.AuxInt)
v.reset(OpARM64XORconst)
v.AuxInt = int64ToAuxInt(^rotateRight64(c, d))
v.AddArg(x)
return true
}
// match: (EONshiftRO (RORconst x [c]) x [c])
// result: (MOVDconst [-1])
for {
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpARM64RORconst || auxIntToInt64(v_0.AuxInt) != c {
break
}
x := v_0.Args[0]
if x != v_1 {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(-1)
return true
}
return false
}
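// The Equal rules replace a comparison of an AND/ADD/MADD/MSUB result against
// zero with a direct flag-setting TST/CMN/CMP form when that intermediate
// value has a single use, evaluate the result directly for FlagConstant, and
// drop InvertFlags, since equality does not depend on operand order.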
func rewriteValueARM64_OpARM64Equal(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
// match: (Equal (CMPconst [0] z:(AND x y)))
// cond: z.Uses == 1
// result: (Equal (TST x y))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64AND {
break
}
y := z.Args[1]
x := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] x:(ANDconst [c] y)))
// cond: x.Uses == 1
// result: (Equal (TSTWconst [int32(c)] y))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
x := v_0.Args[0]
if x.Op != OpARM64ANDconst {
break
}
c := auxIntToInt64(x.AuxInt)
y := x.Args[0]
if !(x.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags)
v0.AuxInt = int32ToAuxInt(int32(c))
v0.AddArg(y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] z:(AND x y)))
// cond: z.Uses == 1
// result: (Equal (TSTW x y))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64AND {
break
}
y := z.Args[1]
x := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPconst [0] x:(ANDconst [c] y)))
// cond: x.Uses == 1
// result: (Equal (TSTconst [c] y))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
x := v_0.Args[0]
if x.Op != OpARM64ANDconst {
break
}
c := auxIntToInt64(x.AuxInt)
y := x.Args[0]
if !(x.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg(y)
v.AddArg(v0)
return true
}
// match: (Equal (CMP x z:(NEG y)))
// cond: z.Uses == 1
// result: (Equal (CMN x y))
for {
if v_0.Op != OpARM64CMP {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
z := v_0.Args[1]
if z.Op != OpARM64NEG {
break
}
y := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPW x z:(NEG y)))
// cond: z.Uses == 1
// result: (Equal (CMNW x y))
for {
if v_0.Op != OpARM64CMPW {
break
}
_ = v_0.Args[1]
x := v_0.Args[0]
z := v_0.Args[1]
if z.Op != OpARM64NEG {
break
}
y := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPconst [0] x:(ADDconst [c] y)))
// cond: x.Uses == 1
// result: (Equal (CMNconst [c] y))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
x := v_0.Args[0]
if x.Op != OpARM64ADDconst {
break
}
c := auxIntToInt64(x.AuxInt)
y := x.Args[0]
if !(x.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
v0.AuxInt = int64ToAuxInt(c)
v0.AddArg(y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] x:(ADDconst [c] y)))
// cond: x.Uses == 1
// result: (Equal (CMNWconst [int32(c)] y))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
x := v_0.Args[0]
if x.Op != OpARM64ADDconst {
break
}
c := auxIntToInt64(x.AuxInt)
y := x.Args[0]
if !(x.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
v0.AuxInt = int32ToAuxInt(int32(c))
v0.AddArg(y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPconst [0] z:(ADD x y)))
// cond: z.Uses == 1
// result: (Equal (CMN x y))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64ADD {
break
}
y := z.Args[1]
x := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] z:(ADD x y)))
// cond: z.Uses == 1
// result: (Equal (CMNW x y))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64ADD {
break
}
y := z.Args[1]
x := z.Args[0]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
v0.AddArg2(x, y)
v.AddArg(v0)
return true
}
// match: (Equal (CMPconst [0] z:(MADD a x y)))
// cond: z.Uses == 1
// result: (Equal (CMN a (MUL <x.Type> x y)))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64MADD {
break
}
y := z.Args[2]
a := z.Args[0]
x := z.Args[1]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
v1.AddArg2(x, y)
v0.AddArg2(a, v1)
v.AddArg(v0)
return true
}
// match: (Equal (CMPconst [0] z:(MSUB a x y)))
// cond: z.Uses == 1
// result: (Equal (CMP a (MUL <x.Type> x y)))
for {
if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64MSUB {
break
}
y := z.Args[2]
a := z.Args[0]
x := z.Args[1]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
v1.AddArg2(x, y)
v0.AddArg2(a, v1)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] z:(MADDW a x y)))
// cond: z.Uses == 1
// result: (Equal (CMNW a (MULW <x.Type> x y)))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64MADDW {
break
}
y := z.Args[2]
a := z.Args[0]
x := z.Args[1]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
v1.AddArg2(x, y)
v0.AddArg2(a, v1)
v.AddArg(v0)
return true
}
// match: (Equal (CMPWconst [0] z:(MSUBW a x y)))
// cond: z.Uses == 1
// result: (Equal (CMPW a (MULW <x.Type> x y)))
for {
if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
break
}
z := v_0.Args[0]
if z.Op != OpARM64MSUBW {
break
}
y := z.Args[2]
a := z.Args[0]
x := z.Args[1]
if !(z.Uses == 1) {
break
}
v.reset(OpARM64Equal)
v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
v1.AddArg2(x, y)
v0.AddArg2(a, v1)
v.AddArg(v0)
return true
}
// match: (Equal (FlagConstant [fc]))
// result: (MOVDconst [b2i(fc.eq())])
for {
if v_0.Op != OpARM64FlagConstant {
break
}
fc := auxIntToFlagConstant(v_0.AuxInt)
v.reset(OpARM64MOVDconst)
v.AuxInt = int64ToAuxInt(b2i(fc.eq()))
return true
}
// match: (Equal (InvertFlags x))
// result: (Equal x)
for {
if v_0.Op != OpARM64InvertFlags {
break
}
x := v_0.Args[0]
v.reset(OpARM64Equal)
v.AddArg(x)
return true
}
return false
}
func rewriteValueARM64_OpARM64FADDD(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (FADDD a (FMULD x y))
// cond: a.Block.Func.useFMA(v)
// result: (FMADDD a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
if v_1.Op != OpARM64FMULD {
continue
}
y := v_1.Args[1]
x := v_1.Args[0]
if !(a.Block.Func.useFMA(v)) {
continue
}
v.reset(OpARM64FMADDD)
v.AddArg3(a, x, y)
return true
}
break
}
// match: (FADDD a (FNMULD x y))
// cond: a.Block.Func.useFMA(v)
// result: (FMSUBD a x y)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
a := v_0
if v_1.Op != OpARM64FNMULD {
continue
}
y := v_1.Args[1]
x := v_1.Args[0]
if !(a.Block.Func.useFMA(v)) {
continue
}
v.reset(OpARM64FMSUBD)
v.AddArg3(a, x, y)
return true
}
break
}
return false
}
func rewriteValueARM64_OpARM64FADDS(v *Value) bool {
v_1 := v.Args[1]