blob: dd5aa28d3c8dd50c2d4266d93112aef7ccb0c7d2 [file] [log] [blame]
// autogenerated from gen/ARM64.rules: do not edit!
// generated with: cd gen; go run *.go
package ssa
import "math"
var _ = math.MinInt8 // in case not otherwise used
func rewriteValueARM64(v *Value, config *Config) bool {
switch v.Op {
case OpARM64ADD:
return rewriteValueARM64_OpARM64ADD(v, config)
case OpARM64ADDconst:
return rewriteValueARM64_OpARM64ADDconst(v, config)
case OpARM64ADDshiftLL:
return rewriteValueARM64_OpARM64ADDshiftLL(v, config)
case OpARM64ADDshiftRA:
return rewriteValueARM64_OpARM64ADDshiftRA(v, config)
case OpARM64ADDshiftRL:
return rewriteValueARM64_OpARM64ADDshiftRL(v, config)
case OpARM64AND:
return rewriteValueARM64_OpARM64AND(v, config)
case OpARM64ANDconst:
return rewriteValueARM64_OpARM64ANDconst(v, config)
case OpARM64ANDshiftLL:
return rewriteValueARM64_OpARM64ANDshiftLL(v, config)
case OpARM64ANDshiftRA:
return rewriteValueARM64_OpARM64ANDshiftRA(v, config)
case OpARM64ANDshiftRL:
return rewriteValueARM64_OpARM64ANDshiftRL(v, config)
case OpARM64BIC:
return rewriteValueARM64_OpARM64BIC(v, config)
case OpARM64BICconst:
return rewriteValueARM64_OpARM64BICconst(v, config)
case OpARM64BICshiftLL:
return rewriteValueARM64_OpARM64BICshiftLL(v, config)
case OpARM64BICshiftRA:
return rewriteValueARM64_OpARM64BICshiftRA(v, config)
case OpARM64BICshiftRL:
return rewriteValueARM64_OpARM64BICshiftRL(v, config)
case OpARM64CMP:
return rewriteValueARM64_OpARM64CMP(v, config)
case OpARM64CMPW:
return rewriteValueARM64_OpARM64CMPW(v, config)
case OpARM64CMPWconst:
return rewriteValueARM64_OpARM64CMPWconst(v, config)
case OpARM64CMPconst:
return rewriteValueARM64_OpARM64CMPconst(v, config)
case OpARM64CMPshiftLL:
return rewriteValueARM64_OpARM64CMPshiftLL(v, config)
case OpARM64CMPshiftRA:
return rewriteValueARM64_OpARM64CMPshiftRA(v, config)
case OpARM64CMPshiftRL:
return rewriteValueARM64_OpARM64CMPshiftRL(v, config)
case OpARM64CSELULT:
return rewriteValueARM64_OpARM64CSELULT(v, config)
case OpARM64CSELULT0:
return rewriteValueARM64_OpARM64CSELULT0(v, config)
case OpARM64DIV:
return rewriteValueARM64_OpARM64DIV(v, config)
case OpARM64DIVW:
return rewriteValueARM64_OpARM64DIVW(v, config)
case OpARM64Equal:
return rewriteValueARM64_OpARM64Equal(v, config)
case OpARM64FMOVDload:
return rewriteValueARM64_OpARM64FMOVDload(v, config)
case OpARM64FMOVDstore:
return rewriteValueARM64_OpARM64FMOVDstore(v, config)
case OpARM64FMOVSload:
return rewriteValueARM64_OpARM64FMOVSload(v, config)
case OpARM64FMOVSstore:
return rewriteValueARM64_OpARM64FMOVSstore(v, config)
case OpARM64GreaterEqual:
return rewriteValueARM64_OpARM64GreaterEqual(v, config)
case OpARM64GreaterEqualU:
return rewriteValueARM64_OpARM64GreaterEqualU(v, config)
case OpARM64GreaterThan:
return rewriteValueARM64_OpARM64GreaterThan(v, config)
case OpARM64GreaterThanU:
return rewriteValueARM64_OpARM64GreaterThanU(v, config)
case OpARM64LessEqual:
return rewriteValueARM64_OpARM64LessEqual(v, config)
case OpARM64LessEqualU:
return rewriteValueARM64_OpARM64LessEqualU(v, config)
case OpARM64LessThan:
return rewriteValueARM64_OpARM64LessThan(v, config)
case OpARM64LessThanU:
return rewriteValueARM64_OpARM64LessThanU(v, config)
case OpARM64MOD:
return rewriteValueARM64_OpARM64MOD(v, config)
case OpARM64MODW:
return rewriteValueARM64_OpARM64MODW(v, config)
case OpARM64MOVBUload:
return rewriteValueARM64_OpARM64MOVBUload(v, config)
case OpARM64MOVBUreg:
return rewriteValueARM64_OpARM64MOVBUreg(v, config)
case OpARM64MOVBload:
return rewriteValueARM64_OpARM64MOVBload(v, config)
case OpARM64MOVBreg:
return rewriteValueARM64_OpARM64MOVBreg(v, config)
case OpARM64MOVBstore:
return rewriteValueARM64_OpARM64MOVBstore(v, config)
case OpARM64MOVBstorezero:
return rewriteValueARM64_OpARM64MOVBstorezero(v, config)
case OpARM64MOVDload:
return rewriteValueARM64_OpARM64MOVDload(v, config)
case OpARM64MOVDreg:
return rewriteValueARM64_OpARM64MOVDreg(v, config)
case OpARM64MOVDstore:
return rewriteValueARM64_OpARM64MOVDstore(v, config)
case OpARM64MOVDstorezero:
return rewriteValueARM64_OpARM64MOVDstorezero(v, config)
case OpARM64MOVHUload:
return rewriteValueARM64_OpARM64MOVHUload(v, config)
case OpARM64MOVHUreg:
return rewriteValueARM64_OpARM64MOVHUreg(v, config)
case OpARM64MOVHload:
return rewriteValueARM64_OpARM64MOVHload(v, config)
case OpARM64MOVHreg:
return rewriteValueARM64_OpARM64MOVHreg(v, config)
case OpARM64MOVHstore:
return rewriteValueARM64_OpARM64MOVHstore(v, config)
case OpARM64MOVHstorezero:
return rewriteValueARM64_OpARM64MOVHstorezero(v, config)
case OpARM64MOVWUload:
return rewriteValueARM64_OpARM64MOVWUload(v, config)
case OpARM64MOVWUreg:
return rewriteValueARM64_OpARM64MOVWUreg(v, config)
case OpARM64MOVWload:
return rewriteValueARM64_OpARM64MOVWload(v, config)
case OpARM64MOVWreg:
return rewriteValueARM64_OpARM64MOVWreg(v, config)
case OpARM64MOVWstore:
return rewriteValueARM64_OpARM64MOVWstore(v, config)
case OpARM64MOVWstorezero:
return rewriteValueARM64_OpARM64MOVWstorezero(v, config)
case OpARM64MUL:
return rewriteValueARM64_OpARM64MUL(v, config)
case OpARM64MULW:
return rewriteValueARM64_OpARM64MULW(v, config)
case OpARM64MVN:
return rewriteValueARM64_OpARM64MVN(v, config)
case OpARM64NEG:
return rewriteValueARM64_OpARM64NEG(v, config)
case OpARM64NotEqual:
return rewriteValueARM64_OpARM64NotEqual(v, config)
case OpARM64OR:
return rewriteValueARM64_OpARM64OR(v, config)
case OpARM64ORconst:
return rewriteValueARM64_OpARM64ORconst(v, config)
case OpARM64ORshiftLL:
return rewriteValueARM64_OpARM64ORshiftLL(v, config)
case OpARM64ORshiftRA:
return rewriteValueARM64_OpARM64ORshiftRA(v, config)
case OpARM64ORshiftRL:
return rewriteValueARM64_OpARM64ORshiftRL(v, config)
case OpARM64SLL:
return rewriteValueARM64_OpARM64SLL(v, config)
case OpARM64SLLconst:
return rewriteValueARM64_OpARM64SLLconst(v, config)
case OpARM64SRA:
return rewriteValueARM64_OpARM64SRA(v, config)
case OpARM64SRAconst:
return rewriteValueARM64_OpARM64SRAconst(v, config)
case OpARM64SRL:
return rewriteValueARM64_OpARM64SRL(v, config)
case OpARM64SRLconst:
return rewriteValueARM64_OpARM64SRLconst(v, config)
case OpARM64SUB:
return rewriteValueARM64_OpARM64SUB(v, config)
case OpARM64SUBconst:
return rewriteValueARM64_OpARM64SUBconst(v, config)
case OpARM64SUBshiftLL:
return rewriteValueARM64_OpARM64SUBshiftLL(v, config)
case OpARM64SUBshiftRA:
return rewriteValueARM64_OpARM64SUBshiftRA(v, config)
case OpARM64SUBshiftRL:
return rewriteValueARM64_OpARM64SUBshiftRL(v, config)
case OpARM64UDIV:
return rewriteValueARM64_OpARM64UDIV(v, config)
case OpARM64UDIVW:
return rewriteValueARM64_OpARM64UDIVW(v, config)
case OpARM64UMOD:
return rewriteValueARM64_OpARM64UMOD(v, config)
case OpARM64UMODW:
return rewriteValueARM64_OpARM64UMODW(v, config)
case OpARM64XOR:
return rewriteValueARM64_OpARM64XOR(v, config)
case OpARM64XORconst:
return rewriteValueARM64_OpARM64XORconst(v, config)
case OpARM64XORshiftLL:
return rewriteValueARM64_OpARM64XORshiftLL(v, config)
case OpARM64XORshiftRA:
return rewriteValueARM64_OpARM64XORshiftRA(v, config)
case OpARM64XORshiftRL:
return rewriteValueARM64_OpARM64XORshiftRL(v, config)
case OpAdd16:
return rewriteValueARM64_OpAdd16(v, config)
case OpAdd32:
return rewriteValueARM64_OpAdd32(v, config)
case OpAdd32F:
return rewriteValueARM64_OpAdd32F(v, config)
case OpAdd64:
return rewriteValueARM64_OpAdd64(v, config)
case OpAdd64F:
return rewriteValueARM64_OpAdd64F(v, config)
case OpAdd8:
return rewriteValueARM64_OpAdd8(v, config)
case OpAddPtr:
return rewriteValueARM64_OpAddPtr(v, config)
case OpAddr:
return rewriteValueARM64_OpAddr(v, config)
case OpAnd16:
return rewriteValueARM64_OpAnd16(v, config)
case OpAnd32:
return rewriteValueARM64_OpAnd32(v, config)
case OpAnd64:
return rewriteValueARM64_OpAnd64(v, config)
case OpAnd8:
return rewriteValueARM64_OpAnd8(v, config)
case OpAndB:
return rewriteValueARM64_OpAndB(v, config)
case OpAtomicAdd32:
return rewriteValueARM64_OpAtomicAdd32(v, config)
case OpAtomicAdd64:
return rewriteValueARM64_OpAtomicAdd64(v, config)
case OpAtomicAnd8:
return rewriteValueARM64_OpAtomicAnd8(v, config)
case OpAtomicCompareAndSwap32:
return rewriteValueARM64_OpAtomicCompareAndSwap32(v, config)
case OpAtomicCompareAndSwap64:
return rewriteValueARM64_OpAtomicCompareAndSwap64(v, config)
case OpAtomicExchange32:
return rewriteValueARM64_OpAtomicExchange32(v, config)
case OpAtomicExchange64:
return rewriteValueARM64_OpAtomicExchange64(v, config)
case OpAtomicLoad32:
return rewriteValueARM64_OpAtomicLoad32(v, config)
case OpAtomicLoad64:
return rewriteValueARM64_OpAtomicLoad64(v, config)
case OpAtomicLoadPtr:
return rewriteValueARM64_OpAtomicLoadPtr(v, config)
case OpAtomicOr8:
return rewriteValueARM64_OpAtomicOr8(v, config)
case OpAtomicStore32:
return rewriteValueARM64_OpAtomicStore32(v, config)
case OpAtomicStore64:
return rewriteValueARM64_OpAtomicStore64(v, config)
case OpAtomicStorePtrNoWB:
return rewriteValueARM64_OpAtomicStorePtrNoWB(v, config)
case OpAvg64u:
return rewriteValueARM64_OpAvg64u(v, config)
case OpBswap32:
return rewriteValueARM64_OpBswap32(v, config)
case OpBswap64:
return rewriteValueARM64_OpBswap64(v, config)
case OpClosureCall:
return rewriteValueARM64_OpClosureCall(v, config)
case OpCom16:
return rewriteValueARM64_OpCom16(v, config)
case OpCom32:
return rewriteValueARM64_OpCom32(v, config)
case OpCom64:
return rewriteValueARM64_OpCom64(v, config)
case OpCom8:
return rewriteValueARM64_OpCom8(v, config)
case OpConst16:
return rewriteValueARM64_OpConst16(v, config)
case OpConst32:
return rewriteValueARM64_OpConst32(v, config)
case OpConst32F:
return rewriteValueARM64_OpConst32F(v, config)
case OpConst64:
return rewriteValueARM64_OpConst64(v, config)
case OpConst64F:
return rewriteValueARM64_OpConst64F(v, config)
case OpConst8:
return rewriteValueARM64_OpConst8(v, config)
case OpConstBool:
return rewriteValueARM64_OpConstBool(v, config)
case OpConstNil:
return rewriteValueARM64_OpConstNil(v, config)
case OpConvert:
return rewriteValueARM64_OpConvert(v, config)
case OpCtz32:
return rewriteValueARM64_OpCtz32(v, config)
case OpCtz64:
return rewriteValueARM64_OpCtz64(v, config)
case OpCvt32Fto32:
return rewriteValueARM64_OpCvt32Fto32(v, config)
case OpCvt32Fto32U:
return rewriteValueARM64_OpCvt32Fto32U(v, config)
case OpCvt32Fto64:
return rewriteValueARM64_OpCvt32Fto64(v, config)
case OpCvt32Fto64F:
return rewriteValueARM64_OpCvt32Fto64F(v, config)
case OpCvt32Fto64U:
return rewriteValueARM64_OpCvt32Fto64U(v, config)
case OpCvt32Uto32F:
return rewriteValueARM64_OpCvt32Uto32F(v, config)
case OpCvt32Uto64F:
return rewriteValueARM64_OpCvt32Uto64F(v, config)
case OpCvt32to32F:
return rewriteValueARM64_OpCvt32to32F(v, config)
case OpCvt32to64F:
return rewriteValueARM64_OpCvt32to64F(v, config)
case OpCvt64Fto32:
return rewriteValueARM64_OpCvt64Fto32(v, config)
case OpCvt64Fto32F:
return rewriteValueARM64_OpCvt64Fto32F(v, config)
case OpCvt64Fto32U:
return rewriteValueARM64_OpCvt64Fto32U(v, config)
case OpCvt64Fto64:
return rewriteValueARM64_OpCvt64Fto64(v, config)
case OpCvt64Fto64U:
return rewriteValueARM64_OpCvt64Fto64U(v, config)
case OpCvt64Uto32F:
return rewriteValueARM64_OpCvt64Uto32F(v, config)
case OpCvt64Uto64F:
return rewriteValueARM64_OpCvt64Uto64F(v, config)
case OpCvt64to32F:
return rewriteValueARM64_OpCvt64to32F(v, config)
case OpCvt64to64F:
return rewriteValueARM64_OpCvt64to64F(v, config)
case OpDeferCall:
return rewriteValueARM64_OpDeferCall(v, config)
case OpDiv16:
return rewriteValueARM64_OpDiv16(v, config)
case OpDiv16u:
return rewriteValueARM64_OpDiv16u(v, config)
case OpDiv32:
return rewriteValueARM64_OpDiv32(v, config)
case OpDiv32F:
return rewriteValueARM64_OpDiv32F(v, config)
case OpDiv32u:
return rewriteValueARM64_OpDiv32u(v, config)
case OpDiv64:
return rewriteValueARM64_OpDiv64(v, config)
case OpDiv64F:
return rewriteValueARM64_OpDiv64F(v, config)
case OpDiv64u:
return rewriteValueARM64_OpDiv64u(v, config)
case OpDiv8:
return rewriteValueARM64_OpDiv8(v, config)
case OpDiv8u:
return rewriteValueARM64_OpDiv8u(v, config)
case OpEq16:
return rewriteValueARM64_OpEq16(v, config)
case OpEq32:
return rewriteValueARM64_OpEq32(v, config)
case OpEq32F:
return rewriteValueARM64_OpEq32F(v, config)
case OpEq64:
return rewriteValueARM64_OpEq64(v, config)
case OpEq64F:
return rewriteValueARM64_OpEq64F(v, config)
case OpEq8:
return rewriteValueARM64_OpEq8(v, config)
case OpEqB:
return rewriteValueARM64_OpEqB(v, config)
case OpEqPtr:
return rewriteValueARM64_OpEqPtr(v, config)
case OpGeq16:
return rewriteValueARM64_OpGeq16(v, config)
case OpGeq16U:
return rewriteValueARM64_OpGeq16U(v, config)
case OpGeq32:
return rewriteValueARM64_OpGeq32(v, config)
case OpGeq32F:
return rewriteValueARM64_OpGeq32F(v, config)
case OpGeq32U:
return rewriteValueARM64_OpGeq32U(v, config)
case OpGeq64:
return rewriteValueARM64_OpGeq64(v, config)
case OpGeq64F:
return rewriteValueARM64_OpGeq64F(v, config)
case OpGeq64U:
return rewriteValueARM64_OpGeq64U(v, config)
case OpGeq8:
return rewriteValueARM64_OpGeq8(v, config)
case OpGeq8U:
return rewriteValueARM64_OpGeq8U(v, config)
case OpGetClosurePtr:
return rewriteValueARM64_OpGetClosurePtr(v, config)
case OpGoCall:
return rewriteValueARM64_OpGoCall(v, config)
case OpGreater16:
return rewriteValueARM64_OpGreater16(v, config)
case OpGreater16U:
return rewriteValueARM64_OpGreater16U(v, config)
case OpGreater32:
return rewriteValueARM64_OpGreater32(v, config)
case OpGreater32F:
return rewriteValueARM64_OpGreater32F(v, config)
case OpGreater32U:
return rewriteValueARM64_OpGreater32U(v, config)
case OpGreater64:
return rewriteValueARM64_OpGreater64(v, config)
case OpGreater64F:
return rewriteValueARM64_OpGreater64F(v, config)
case OpGreater64U:
return rewriteValueARM64_OpGreater64U(v, config)
case OpGreater8:
return rewriteValueARM64_OpGreater8(v, config)
case OpGreater8U:
return rewriteValueARM64_OpGreater8U(v, config)
case OpHmul16:
return rewriteValueARM64_OpHmul16(v, config)
case OpHmul16u:
return rewriteValueARM64_OpHmul16u(v, config)
case OpHmul32:
return rewriteValueARM64_OpHmul32(v, config)
case OpHmul32u:
return rewriteValueARM64_OpHmul32u(v, config)
case OpHmul64:
return rewriteValueARM64_OpHmul64(v, config)
case OpHmul64u:
return rewriteValueARM64_OpHmul64u(v, config)
case OpHmul8:
return rewriteValueARM64_OpHmul8(v, config)
case OpHmul8u:
return rewriteValueARM64_OpHmul8u(v, config)
case OpInterCall:
return rewriteValueARM64_OpInterCall(v, config)
case OpIsInBounds:
return rewriteValueARM64_OpIsInBounds(v, config)
case OpIsNonNil:
return rewriteValueARM64_OpIsNonNil(v, config)
case OpIsSliceInBounds:
return rewriteValueARM64_OpIsSliceInBounds(v, config)
case OpLeq16:
return rewriteValueARM64_OpLeq16(v, config)
case OpLeq16U:
return rewriteValueARM64_OpLeq16U(v, config)
case OpLeq32:
return rewriteValueARM64_OpLeq32(v, config)
case OpLeq32F:
return rewriteValueARM64_OpLeq32F(v, config)
case OpLeq32U:
return rewriteValueARM64_OpLeq32U(v, config)
case OpLeq64:
return rewriteValueARM64_OpLeq64(v, config)
case OpLeq64F:
return rewriteValueARM64_OpLeq64F(v, config)
case OpLeq64U:
return rewriteValueARM64_OpLeq64U(v, config)
case OpLeq8:
return rewriteValueARM64_OpLeq8(v, config)
case OpLeq8U:
return rewriteValueARM64_OpLeq8U(v, config)
case OpLess16:
return rewriteValueARM64_OpLess16(v, config)
case OpLess16U:
return rewriteValueARM64_OpLess16U(v, config)
case OpLess32:
return rewriteValueARM64_OpLess32(v, config)
case OpLess32F:
return rewriteValueARM64_OpLess32F(v, config)
case OpLess32U:
return rewriteValueARM64_OpLess32U(v, config)
case OpLess64:
return rewriteValueARM64_OpLess64(v, config)
case OpLess64F:
return rewriteValueARM64_OpLess64F(v, config)
case OpLess64U:
return rewriteValueARM64_OpLess64U(v, config)
case OpLess8:
return rewriteValueARM64_OpLess8(v, config)
case OpLess8U:
return rewriteValueARM64_OpLess8U(v, config)
case OpLoad:
return rewriteValueARM64_OpLoad(v, config)
case OpLrot16:
return rewriteValueARM64_OpLrot16(v, config)
case OpLrot32:
return rewriteValueARM64_OpLrot32(v, config)
case OpLrot64:
return rewriteValueARM64_OpLrot64(v, config)
case OpLrot8:
return rewriteValueARM64_OpLrot8(v, config)
case OpLsh16x16:
return rewriteValueARM64_OpLsh16x16(v, config)
case OpLsh16x32:
return rewriteValueARM64_OpLsh16x32(v, config)
case OpLsh16x64:
return rewriteValueARM64_OpLsh16x64(v, config)
case OpLsh16x8:
return rewriteValueARM64_OpLsh16x8(v, config)
case OpLsh32x16:
return rewriteValueARM64_OpLsh32x16(v, config)
case OpLsh32x32:
return rewriteValueARM64_OpLsh32x32(v, config)
case OpLsh32x64:
return rewriteValueARM64_OpLsh32x64(v, config)
case OpLsh32x8:
return rewriteValueARM64_OpLsh32x8(v, config)
case OpLsh64x16:
return rewriteValueARM64_OpLsh64x16(v, config)
case OpLsh64x32:
return rewriteValueARM64_OpLsh64x32(v, config)
case OpLsh64x64:
return rewriteValueARM64_OpLsh64x64(v, config)
case OpLsh64x8:
return rewriteValueARM64_OpLsh64x8(v, config)
case OpLsh8x16:
return rewriteValueARM64_OpLsh8x16(v, config)
case OpLsh8x32:
return rewriteValueARM64_OpLsh8x32(v, config)
case OpLsh8x64:
return rewriteValueARM64_OpLsh8x64(v, config)
case OpLsh8x8:
return rewriteValueARM64_OpLsh8x8(v, config)
case OpMod16:
return rewriteValueARM64_OpMod16(v, config)
case OpMod16u:
return rewriteValueARM64_OpMod16u(v, config)
case OpMod32:
return rewriteValueARM64_OpMod32(v, config)
case OpMod32u:
return rewriteValueARM64_OpMod32u(v, config)
case OpMod64:
return rewriteValueARM64_OpMod64(v, config)
case OpMod64u:
return rewriteValueARM64_OpMod64u(v, config)
case OpMod8:
return rewriteValueARM64_OpMod8(v, config)
case OpMod8u:
return rewriteValueARM64_OpMod8u(v, config)
case OpMove:
return rewriteValueARM64_OpMove(v, config)
case OpMul16:
return rewriteValueARM64_OpMul16(v, config)
case OpMul32:
return rewriteValueARM64_OpMul32(v, config)
case OpMul32F:
return rewriteValueARM64_OpMul32F(v, config)
case OpMul64:
return rewriteValueARM64_OpMul64(v, config)
case OpMul64F:
return rewriteValueARM64_OpMul64F(v, config)
case OpMul8:
return rewriteValueARM64_OpMul8(v, config)
case OpNeg16:
return rewriteValueARM64_OpNeg16(v, config)
case OpNeg32:
return rewriteValueARM64_OpNeg32(v, config)
case OpNeg32F:
return rewriteValueARM64_OpNeg32F(v, config)
case OpNeg64:
return rewriteValueARM64_OpNeg64(v, config)
case OpNeg64F:
return rewriteValueARM64_OpNeg64F(v, config)
case OpNeg8:
return rewriteValueARM64_OpNeg8(v, config)
case OpNeq16:
return rewriteValueARM64_OpNeq16(v, config)
case OpNeq32:
return rewriteValueARM64_OpNeq32(v, config)
case OpNeq32F:
return rewriteValueARM64_OpNeq32F(v, config)
case OpNeq64:
return rewriteValueARM64_OpNeq64(v, config)
case OpNeq64F:
return rewriteValueARM64_OpNeq64F(v, config)
case OpNeq8:
return rewriteValueARM64_OpNeq8(v, config)
case OpNeqB:
return rewriteValueARM64_OpNeqB(v, config)
case OpNeqPtr:
return rewriteValueARM64_OpNeqPtr(v, config)
case OpNilCheck:
return rewriteValueARM64_OpNilCheck(v, config)
case OpNot:
return rewriteValueARM64_OpNot(v, config)
case OpOffPtr:
return rewriteValueARM64_OpOffPtr(v, config)
case OpOr16:
return rewriteValueARM64_OpOr16(v, config)
case OpOr32:
return rewriteValueARM64_OpOr32(v, config)
case OpOr64:
return rewriteValueARM64_OpOr64(v, config)
case OpOr8:
return rewriteValueARM64_OpOr8(v, config)
case OpOrB:
return rewriteValueARM64_OpOrB(v, config)
case OpRsh16Ux16:
return rewriteValueARM64_OpRsh16Ux16(v, config)
case OpRsh16Ux32:
return rewriteValueARM64_OpRsh16Ux32(v, config)
case OpRsh16Ux64:
return rewriteValueARM64_OpRsh16Ux64(v, config)
case OpRsh16Ux8:
return rewriteValueARM64_OpRsh16Ux8(v, config)
case OpRsh16x16:
return rewriteValueARM64_OpRsh16x16(v, config)
case OpRsh16x32:
return rewriteValueARM64_OpRsh16x32(v, config)
case OpRsh16x64:
return rewriteValueARM64_OpRsh16x64(v, config)
case OpRsh16x8:
return rewriteValueARM64_OpRsh16x8(v, config)
case OpRsh32Ux16:
return rewriteValueARM64_OpRsh32Ux16(v, config)
case OpRsh32Ux32:
return rewriteValueARM64_OpRsh32Ux32(v, config)
case OpRsh32Ux64:
return rewriteValueARM64_OpRsh32Ux64(v, config)
case OpRsh32Ux8:
return rewriteValueARM64_OpRsh32Ux8(v, config)
case OpRsh32x16:
return rewriteValueARM64_OpRsh32x16(v, config)
case OpRsh32x32:
return rewriteValueARM64_OpRsh32x32(v, config)
case OpRsh32x64:
return rewriteValueARM64_OpRsh32x64(v, config)
case OpRsh32x8:
return rewriteValueARM64_OpRsh32x8(v, config)
case OpRsh64Ux16:
return rewriteValueARM64_OpRsh64Ux16(v, config)
case OpRsh64Ux32:
return rewriteValueARM64_OpRsh64Ux32(v, config)
case OpRsh64Ux64:
return rewriteValueARM64_OpRsh64Ux64(v, config)
case OpRsh64Ux8:
return rewriteValueARM64_OpRsh64Ux8(v, config)
case OpRsh64x16:
return rewriteValueARM64_OpRsh64x16(v, config)
case OpRsh64x32:
return rewriteValueARM64_OpRsh64x32(v, config)
case OpRsh64x64:
return rewriteValueARM64_OpRsh64x64(v, config)
case OpRsh64x8:
return rewriteValueARM64_OpRsh64x8(v, config)
case OpRsh8Ux16:
return rewriteValueARM64_OpRsh8Ux16(v, config)
case OpRsh8Ux32:
return rewriteValueARM64_OpRsh8Ux32(v, config)
case OpRsh8Ux64:
return rewriteValueARM64_OpRsh8Ux64(v, config)
case OpRsh8Ux8:
return rewriteValueARM64_OpRsh8Ux8(v, config)
case OpRsh8x16:
return rewriteValueARM64_OpRsh8x16(v, config)
case OpRsh8x32:
return rewriteValueARM64_OpRsh8x32(v, config)
case OpRsh8x64:
return rewriteValueARM64_OpRsh8x64(v, config)
case OpRsh8x8:
return rewriteValueARM64_OpRsh8x8(v, config)
case OpSignExt16to32:
return rewriteValueARM64_OpSignExt16to32(v, config)
case OpSignExt16to64:
return rewriteValueARM64_OpSignExt16to64(v, config)
case OpSignExt32to64:
return rewriteValueARM64_OpSignExt32to64(v, config)
case OpSignExt8to16:
return rewriteValueARM64_OpSignExt8to16(v, config)
case OpSignExt8to32:
return rewriteValueARM64_OpSignExt8to32(v, config)
case OpSignExt8to64:
return rewriteValueARM64_OpSignExt8to64(v, config)
case OpSlicemask:
return rewriteValueARM64_OpSlicemask(v, config)
case OpSqrt:
return rewriteValueARM64_OpSqrt(v, config)
case OpStaticCall:
return rewriteValueARM64_OpStaticCall(v, config)
case OpStore:
return rewriteValueARM64_OpStore(v, config)
case OpSub16:
return rewriteValueARM64_OpSub16(v, config)
case OpSub32:
return rewriteValueARM64_OpSub32(v, config)
case OpSub32F:
return rewriteValueARM64_OpSub32F(v, config)
case OpSub64:
return rewriteValueARM64_OpSub64(v, config)
case OpSub64F:
return rewriteValueARM64_OpSub64F(v, config)
case OpSub8:
return rewriteValueARM64_OpSub8(v, config)
case OpSubPtr:
return rewriteValueARM64_OpSubPtr(v, config)
case OpTrunc16to8:
return rewriteValueARM64_OpTrunc16to8(v, config)
case OpTrunc32to16:
return rewriteValueARM64_OpTrunc32to16(v, config)
case OpTrunc32to8:
return rewriteValueARM64_OpTrunc32to8(v, config)
case OpTrunc64to16:
return rewriteValueARM64_OpTrunc64to16(v, config)
case OpTrunc64to32:
return rewriteValueARM64_OpTrunc64to32(v, config)
case OpTrunc64to8:
return rewriteValueARM64_OpTrunc64to8(v, config)
case OpXor16:
return rewriteValueARM64_OpXor16(v, config)
case OpXor32:
return rewriteValueARM64_OpXor32(v, config)
case OpXor64:
return rewriteValueARM64_OpXor64(v, config)
case OpXor8:
return rewriteValueARM64_OpXor8(v, config)
case OpZero:
return rewriteValueARM64_OpZero(v, config)
case OpZeroExt16to32:
return rewriteValueARM64_OpZeroExt16to32(v, config)
case OpZeroExt16to64:
return rewriteValueARM64_OpZeroExt16to64(v, config)
case OpZeroExt32to64:
return rewriteValueARM64_OpZeroExt32to64(v, config)
case OpZeroExt8to16:
return rewriteValueARM64_OpZeroExt8to16(v, config)
case OpZeroExt8to32:
return rewriteValueARM64_OpZeroExt8to32(v, config)
case OpZeroExt8to64:
return rewriteValueARM64_OpZeroExt8to64(v, config)
}
return false
}
// rewriteValueARM64_OpARM64ADD applies the ARM64 rewrite rules for ADD:
// a constant operand folds into ADDconst, an ADD of a NEG becomes SUB,
// and a shifted-constant operand fuses into the ADDshiftLL/RL/RA forms.
// Each rule appears twice to handle either operand order. Reports
// whether a rule matched and rewrote v in place.
// NOTE: v.reset clears v's AuxInt and Args, so all reads of v happen
// before the reset in every rule body.
func rewriteValueARM64_OpARM64ADD(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SLLconst [c] y))
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLLconst [c] y) x)
	// cond:
	// result: (ADDshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRLconst [c] y))
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRLconst [c] y) x)
	// cond:
	// result: (ADDshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x (SRAconst [c] y))
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SRAconst [c] y) x)
	// cond:
	// result: (ADDshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDconst folds ADDconst with its operand:
// it merges into a MOVDaddr offset, eliminates the add-zero identity,
// folds a constant operand, and combines chained ADDconst/SUBconst.
// Reports whether a rule matched and rewrote v in place.
func rewriteValueARM64_OpARM64ADDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) => (MOVDaddr [off1+off2] {sym} ptr)
	for {
		// Read everything from v before reset, which clears its state.
		outer := v.AuxInt
		addr := v.Args[0]
		if addr.Op != OpARM64MOVDaddr {
			break
		}
		inner := addr.AuxInt
		symbol := addr.Aux
		base := addr.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = outer + inner
		v.Aux = symbol
		v.AddArg(base)
		return true
	}
	// (ADDconst [0] x) => x
	for {
		if v.AuxInt != 0 {
			break
		}
		operand := v.Args[0]
		v.reset(OpCopy)
		v.Type = operand.Type
		v.AddArg(operand)
		return true
	}
	// (ADDconst [c] (MOVDconst [d])) => (MOVDconst [c+d])
	for {
		lhs := v.AuxInt
		arg := v.Args[0]
		if arg.Op != OpARM64MOVDconst {
			break
		}
		rhs := arg.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = lhs + rhs
		return true
	}
	// (ADDconst [c] (ADDconst [d] x)) => (ADDconst [c+d] x)
	for {
		lhs := v.AuxInt
		arg := v.Args[0]
		if arg.Op != OpARM64ADDconst {
			break
		}
		rhs := arg.AuxInt
		inner := arg.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = lhs + rhs
		v.AddArg(inner)
		return true
	}
	// (ADDconst [c] (SUBconst [d] x)) => (ADDconst [c-d] x)
	for {
		lhs := v.AuxInt
		arg := v.Args[0]
		if arg.Op != OpARM64SUBconst {
			break
		}
		rhs := arg.AuxInt
		inner := arg.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = lhs - rhs
		v.AddArg(inner)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL simplifies ADDshiftLL when either
// operand is a MOVDconst: a constant left operand becomes ADDconst of a
// fresh SLLconst, and a constant shifted operand folds to ADDconst with
// the shift evaluated at compile time. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64ADDshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// Logical (unsigned) shift left, reinterpreted as int64 AuxInt.
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA simplifies ADDshiftRA when either
// operand is a MOVDconst: a constant left operand becomes ADDconst of a
// fresh SRAconst, and a constant shifted operand folds to ADDconst with
// the arithmetic shift evaluated at compile time. Reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64ADDshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// Signed (arithmetic) right shift of the constant.
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL simplifies ADDshiftRL when either
// operand is a MOVDconst: a constant left operand becomes ADDconst of a
// fresh SRLconst, and a constant shifted operand folds to ADDconst with
// the logical right shift evaluated at compile time. Reports whether v
// was rewritten.
func rewriteValueARM64_OpARM64ADDshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		// Unsigned (logical) right shift of the constant.
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND applies the ARM64 rewrite rules for AND:
// a constant operand folds into ANDconst, (AND x x) collapses to x,
// AND with a bitwise-NOT (MVN) becomes BIC, and a shifted-constant
// operand fuses into the ANDshiftLL/RL/RA forms. Commutative rules
// appear once per operand order. Reports whether a rule matched and
// rewrote v in place.
// NOTE: v.reset clears v's AuxInt and Args, so all reads of v happen
// before the reset in every rule body.
func rewriteValueARM64_OpARM64AND(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AND (MOVDconst [c]) x)
	// cond:
	// result: (ANDconst [c] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x (MOVDconst [c]))
	// cond:
	// result: (ANDconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (AND x (MVN y))
	// cond:
	// result: (BIC x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MVN {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SLLconst [c] y))
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SLLconst [c] y) x)
	// cond:
	// result: (ANDshiftLL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRLconst [c] y))
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SRLconst [c] y) x)
	// cond:
	// result: (ANDshiftRL x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x (SRAconst [c] y))
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (SRAconst [c] y) x)
	// cond:
	// result: (ANDshiftRA x y [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst applies the generated rewrite rules for the
// ARM64 ANDconst op. Rules are tried in order; the first one that matches
// rewrites v in place and returns true. Returns false if no rule applies.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64ANDconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftLL applies the generated rewrite rules for
// the ARM64 ANDshiftLL op (AND with a left-shifted second operand). Rules are
// tried in order; the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64ANDshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build a new SLLconst value
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA applies the generated rewrite rules for
// the ARM64 ANDshiftRA op (AND with an arithmetically right-shifted second
// operand). Rules are tried in order; the first match rewrites v in place and
// returns true. Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64ANDshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build a new SRAconst value
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL applies the generated rewrite rules for
// the ARM64 ANDshiftRL op (AND with a logically right-shifted second operand).
// Rules are tried in order; the first match rewrites v in place and returns
// true. Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64ANDshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build a new SRLconst value
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL x y:(SRLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC applies the generated rewrite rules for the
// ARM64 BIC op (bit clear: arg0 &^ arg1). Rules are tried in order; the first
// match rewrites v in place and returns true. Returns false if no rule applies.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64BIC(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (BIC x (MOVDconst [c]))
	// cond:
	// result: (BICconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BIC x (SLLconst [c] y))
	// cond:
	// result: (BICshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRLconst [c] y))
	// cond:
	// result: (BICshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (BIC x (SRAconst [c] y))
	// cond:
	// result: (BICshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICconst applies the generated rewrite rules for
// the ARM64 BICconst op (arg0 &^ constant). Rules are tried in order; the
// first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64BICconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (BICconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (BICconst [-1] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BICconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d&^c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d &^ c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftLL applies the generated rewrite rules for
// the ARM64 BICshiftLL op (arg0 &^ (arg1 << aux)). Rules are tried in order;
// the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64BICshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRA applies the generated rewrite rules for
// the ARM64 BICshiftRA op (arg0 &^ (arg1 >> aux), arithmetic shift). Rules are
// tried in order; the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64BICshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL applies the generated rewrite rules for
// the ARM64 BICshiftRL op (arg0 &^ (arg1 >> aux), logical shift). Rules are
// tried in order; the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64BICshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (BICconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64BICconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMP applies the generated rewrite rules for the
// ARM64 CMP op (64-bit compare producing flags). When the constant or shifted
// operand is on the left, the rewrite swaps the comparison and wraps it in
// InvertFlags so later flag consumers see the correct ordering. Rules are
// tried in order; the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMP(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by rules that build InvertFlags-wrapped comparisons
	// match: (CMP x (MOVDconst [c]))
	// cond:
	// result: (CMPconst [c] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPconst [c] x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SLLconst [c] y))
	// cond:
	// result: (CMPshiftLL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SLLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftLL x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftLL, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRLconst [c] y))
	// cond:
	// result: (CMPshiftRL x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRLconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRL x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftRL, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x (SRAconst [c] y))
	// cond:
	// result: (CMPshiftRA x y [c])
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		y := v_1.Args[0]
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = c
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (CMP (SRAconst [c] y) x)
	// cond:
	// result: (InvertFlags (CMPshiftRA x y [c]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRAconst {
			break
		}
		c := v_0.AuxInt
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPshiftRA, TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPW applies the generated rewrite rules for the
// ARM64 CMPW op (32-bit compare). Constants are truncated to 32 bits via
// int64(int32(c)) before becoming the CMPWconst aux value; a constant on the
// left is handled by swapping and wrapping in InvertFlags.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPW(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the second rule to build the InvertFlags-wrapped compare
	// match: (CMPW x (MOVDconst [c]))
	// cond:
	// result: (CMPWconst [int64(int32(c))] x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int64(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPWconst [int64(int32(c))] x))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPWconst, TypeFlags)
		v0.AuxInt = int64(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPWconst applies the generated rewrite rules for
// the ARM64 CMPWconst op (32-bit compare against a constant). When both
// operands are constants the compare folds to one of the Flag* pseudo-ops,
// which encode the signed and unsigned orderings simultaneously (e.g.
// FlagLT_UGT = less-than signed, greater-than unsigned). Zero-extended loads
// (MOVBUreg/MOVHUreg) are known to fit their width, so comparing against a
// larger constant folds to FlagLT_ULT.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPWconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)<uint32(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)>uint32(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)<uint32(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)>uint32(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst applies the generated rewrite rules for
// the ARM64 CMPconst op (64-bit compare against a constant). Constant-constant
// compares fold to the Flag* pseudo-ops that encode both the signed and
// unsigned orderings. Values with a known upper bound (zero-extensions,
// small AND masks, right shifts) fold to FlagLT_ULT when the constant is
// provably larger than any value they can produce.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPconst(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(y) && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(y) && uint64(x)>uint64(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(y) && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftLL applies the generated rewrite rules for
// the ARM64 CMPshiftLL op (compare arg0 with arg1 << aux). A constant on the
// left requires materializing the shift and wrapping in InvertFlags; a
// constant on the right folds into the CMPconst aux value.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPshiftLL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build the shifted operand
	// match: (CMPshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SLLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRA applies the generated rewrite rules for
// the ARM64 CMPshiftRA op (compare arg0 with arg1 >> aux, arithmetic shift).
// A constant on the left requires materializing the shift and wrapping in
// InvertFlags; a constant on the right folds into the CMPconst aux value.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPshiftRA(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build the shifted operand
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SRAconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(int64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(int64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL applies the generated rewrite rules for
// the ARM64 CMPshiftRL op (compare arg0 with arg1 >> aux, logical shift).
// A constant on the left requires materializing the shift and wrapping in
// InvertFlags; a constant on the right folds into the CMPconst aux value.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CMPshiftRL(v *Value, config *Config) bool {
	b := v.Block
	_ = b // used by the first rule to build the shifted operand
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Line, OpARM64CMPconst, TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Line, OpARM64SRLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT applies the generated rewrite rules for the
// ARM64 CSELULT op (conditional select: arg0 if flags say unsigned-less-than,
// else arg1). A known flag result (Flag* pseudo-op) selects the winning
// operand outright; selecting against a constant zero specializes to CSELULT0.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CSELULT(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (CSELULT x (MOVDconst [0]) flag)
	// cond:
	// result: (CSELULT0 x flag)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		flag := v.Args[2]
		v.reset(OpARM64CSELULT0)
		v.AddArg(x)
		v.AddArg(flag)
		return true
	}
	// match: (CSELULT _ y (FlagEQ))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagLT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagLT_UGT))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSELULT x _ (FlagGT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT _ y (FlagGT_UGT))
	// cond:
	// result: y
	for {
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSELULT0 applies the generated rewrite rules for
// the ARM64 CSELULT0 op (conditional select against zero: arg0 if flags say
// unsigned-less-than, else 0). A known flag result folds to either arg0 or a
// zero constant.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64CSELULT0(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (CSELULT0 _ (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagLT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSELULT0 x (FlagGT_ULT))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSELULT0 _ (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIV applies the generated rewrite rules for the
// ARM64 DIV op (signed 64-bit divide): it constant-folds a divide of two
// MOVDconst operands. Returns true if v was rewritten.
//
// The fold is guarded by d != 0: the division below executes inside the
// compiler, so folding a constant divide-by-zero (which can occur in dead or
// guarded user code) would crash the compiler itself instead of leaving the
// operation for the runtime's divide-by-zero check.
//
// NOTE: this file is generated from gen/ARM64.rules; mirror this guard there
// (cond: d != 0) so regeneration does not lose it.
func rewriteValueARM64_OpARM64DIV(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(c)/int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(c) / int64(d)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64DIVW applies the generated rewrite rules for the
// ARM64 DIVW op (signed 32-bit divide): it constant-folds a divide of two
// MOVDconst operands, truncating both to 32 bits first. Returns true if v was
// rewritten.
//
// The fold is guarded by int32(d) != 0: the division below executes inside
// the compiler, so folding a constant divide-by-zero (which can occur in dead
// or guarded user code) would crash the compiler itself instead of leaving
// the operation for the runtime's divide-by-zero check. The guard tests the
// truncated value because only the low 32 bits participate in the divide.
//
// NOTE: this file is generated from gen/ARM64.rules; mirror this guard there
// (cond: int32(d) != 0) so regeneration does not lose it.
func rewriteValueARM64_OpARM64DIVW(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: int32(d) != 0
	// result: (MOVDconst [int64(int32(c)/int32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		if !(int32(d) != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) / int32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64Equal applies the generated rewrite rules for the
// ARM64 Equal op (materialize a 0/1 bool from flags). A known flag result
// folds to a constant; Equal is symmetric, so InvertFlags simply peels off.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64Equal(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (Equal (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (Equal (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (InvertFlags x))
	// cond:
	// result: (Equal x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload applies the generated rewrite rules for
// the ARM64 FMOVDload op (load float64): it folds constant address arithmetic
// (ADDconst) and static base addresses (MOVDaddr) into the load's offset/sym.
// The cond requires the combined offset to be 8-byte aligned or to fit the
// unscaled 9-bit immediate range (-256, 256), and rejects args/autos whose
// final frame offsets are not yet known.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64FMOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore applies the generated rewrite rules for
// the ARM64 FMOVDstore op (store float64): it folds constant address
// arithmetic (ADDconst) and static base addresses (MOVDaddr) into the store's
// offset/sym. The cond requires the combined offset to be 8-byte aligned or
// to fit the unscaled 9-bit immediate range (-256, 256), and rejects
// args/autos whose final frame offsets are not yet known.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64FMOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSload applies the generated rewrite rules for
// the ARM64 FMOVSload op (load float32): it folds constant address arithmetic
// (ADDconst) and static base addresses (MOVDaddr) into the load's offset/sym.
// The cond requires the combined offset to be 4-byte aligned or to fit the
// unscaled 9-bit immediate range (-256, 256), and rejects args/autos whose
// final frame offsets are not yet known.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64FMOVSload(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVSload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVSload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVSstore applies the generated rewrite rules for
// the ARM64 FMOVSstore op (store float32): it folds constant address
// arithmetic (ADDconst) and static base addresses (MOVDaddr) into the store's
// offset/sym. The cond requires the combined offset to be 4-byte aligned or
// to fit the unscaled 9-bit immediate range (-256, 256), and rejects
// args/autos whose final frame offsets are not yet known.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64FMOVSstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (FMOVSstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%4==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%4 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqual applies the generated rewrite rules
// for the ARM64 GreaterEqual op (materialize a 0/1 bool for signed >= from
// flags). A known flag result folds to a constant; InvertFlags converts it to
// the mirrored comparison LessEqual.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64GreaterEqual(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (GreaterEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (InvertFlags x))
	// cond:
	// result: (LessEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterEqualU applies the generated rewrite rules
// for the ARM64 GreaterEqualU op (materialize a 0/1 bool for unsigned >= from
// flags). A known flag result folds to a constant using the unsigned half of
// the Flag* encoding; InvertFlags converts it to the mirrored comparison
// LessEqualU.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64GreaterEqualU(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (GreaterEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqualU (InvertFlags x))
	// cond:
	// result: (LessEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThan applies the generated rewrite rules
// for the ARM64 GreaterThan op (materialize a 0/1 bool for signed > from
// flags). A known flag result folds to a constant; InvertFlags converts it to
// the mirrored comparison LessThan.
// Generated from gen/ARM64.rules; do not hand-edit the match logic.
func rewriteValueARM64_OpARM64GreaterThan(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is only needed by rules that build new values; keep it referenced.
	// match: (GreaterThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (InvertFlags x))
	// cond:
	// result: (LessThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64GreaterThanU folds (GreaterThanU flags) for
// statically known flag arguments: the *_UGT states yield (MOVDconst [1]),
// FlagEQ and the *_ULT states yield (MOVDconst [0]), and (InvertFlags x)
// swaps the unsigned comparison into (LessThanU x).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64GreaterThanU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (GreaterThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThanU (InvertFlags x))
	// cond:
	// result: (LessThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqual folds (LessEqual flags) for statically
// known flag arguments: FlagEQ and the FlagLT_* states yield (MOVDconst [1]),
// the FlagGT_* states yield (MOVDconst [0]), and (InvertFlags x) swaps the
// signed comparison into (GreaterEqual x).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64LessEqual(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (LessEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqual (InvertFlags x))
	// cond:
	// result: (GreaterEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessEqualU folds (LessEqualU flags) for statically
// known flag arguments: FlagEQ and the *_ULT states yield (MOVDconst [1]),
// the *_UGT states yield (MOVDconst [0]), and (InvertFlags x) swaps the
// unsigned comparison into (GreaterEqualU x).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64LessEqualU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (LessEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// cond:
	// result: (GreaterEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThan folds (LessThan flags) for statically
// known flag arguments: the FlagLT_* states yield (MOVDconst [1]), FlagEQ and
// the FlagGT_* states yield (MOVDconst [0]), and (InvertFlags x) swaps the
// signed comparison into (GreaterThan x).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64LessThan(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (LessThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (InvertFlags x))
	// cond:
	// result: (GreaterThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThanU folds (LessThanU flags) for statically
// known flag arguments: the *_ULT states yield (MOVDconst [1]), FlagEQ and
// the *_UGT states yield (MOVDconst [0]), and (InvertFlags x) swaps the
// unsigned comparison into (GreaterThanU x).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64LessThanU(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (LessThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// cond:
	// result: (GreaterThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOD constant-folds (MOD (MOVDconst [c]) (MOVDconst [d]))
// into (MOVDconst [c%d]). The fold is skipped when d == 0: evaluating c%0 here
// would panic inside the compiler, whereas leaving the MOD op in place preserves
// the program's own runtime behavior for a zero divisor.
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOD(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [int64(c)%int64(d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Guard against constant division by zero: folding would panic the compiler.
		if d == 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(c) % int64(d)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MODW constant-folds the 32-bit remainder
// (MODW (MOVDconst [c]) (MOVDconst [d])) into (MOVDconst [int64(int32(c)%int32(d))]).
// The fold is skipped when the low 32 bits of d are zero: evaluating the
// remainder here would panic inside the compiler, whereas leaving the MODW op
// in place preserves the program's own runtime behavior for a zero divisor.
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MODW(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
	// cond: int32(d) != 0
	// result: (MOVDconst [int64(int32(c)%int32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		// Guard against constant division by zero: folding would panic the compiler.
		if int32(d) == 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) % int32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUload simplifies an unsigned byte load:
// it folds an (ADDconst [off2] ptr) base into the load's offset, merges a
// (MOVDaddr [off2] {sym2} ptr) base into the load's offset and symbol when
// the symbols can be merged, and turns a load that reads the exact byte just
// zeroed by a matching MOVBstorezero into (MOVDconst [0]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond:
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUreg removes redundant zero-extensions of a
// byte: the argument of a MOVBUload or an earlier MOVBUreg already has its
// upper bits clear, so the op degrades to a plain (MOVDreg x); a constant
// argument is folded to its low 8 bits via int64(uint8(c)).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBload simplifies a signed byte load:
// it folds an (ADDconst [off2] ptr) base into the load's offset, merges a
// (MOVDaddr [off2] {sym2} ptr) base into the load's offset and symbol when
// the symbols can be merged, and turns a load that reads the exact byte just
// zeroed by a matching MOVBstorezero into (MOVDconst [0]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond:
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBreg removes redundant sign-extensions of a
// byte: the argument of a MOVBload or an earlier MOVBreg is already
// sign-extended, so the op degrades to a plain (MOVDreg x); a constant
// argument is folded to its sign-extended low 8 bits via int64(int8(c)).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstore simplifies a byte store:
// it folds an (ADDconst [off2] ptr) base into the store's offset, merges a
// (MOVDaddr [off2] {sym2} ptr) base into the store's offset and symbol when
// the symbols can be merged, turns a store of constant zero into
// MOVBstorezero, and strips any sign/zero-extension (MOVBreg, MOVBUreg,
// MOVHreg, MOVHUreg, MOVWreg, MOVWUreg) from the stored value — only the low
// byte is written, so the extension is irrelevant.
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond:
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstorezero simplifies a zero-byte store:
// it folds an (ADDconst [off2] ptr) base into the store's offset, and merges
// a (MOVDaddr [off2] {sym2} ptr) base into the store's offset and symbol when
// the symbols can be merged. Byte stores have no alignment constraint, so no
// offset condition is needed (unlike the MOVD/MOVH variants).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVBstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond:
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDload simplifies a doubleword load:
// it folds an (ADDconst [off2] ptr) base or a mergeable (MOVDaddr) base into
// the load's offset/symbol, but only when the combined offset stays
// encodable — either 8-byte aligned (scaled-offset form) or within
// (-256, 256) for a non-arg/non-auto symbol (unscaled form). A load that
// reads the doubleword just zeroed by a matching MOVDstorezero becomes
// (MOVDconst [0]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVDload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDreg simplifies a register-to-register move:
// when the source has exactly one use the move is unnecessary and becomes a
// (MOVDnop x), and a constant source is folded straight into (MOVDconst [c]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVDreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstore simplifies a doubleword store:
// it folds an (ADDconst [off2] ptr) base or a mergeable (MOVDaddr) base into
// the store's offset/symbol, but only when the combined offset stays
// encodable — either 8-byte aligned (scaled-offset form) or within
// (-256, 256) for a non-arg/non-auto symbol (unscaled form). A store of
// constant zero becomes MOVDstorezero.
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVDstore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezero simplifies a zero doubleword store:
// it folds an (ADDconst [off2] ptr) base or a mergeable (MOVDaddr) base into
// the store's offset/symbol, but only when the combined offset stays
// encodable — either 8-byte aligned (scaled-offset form) or within
// (-256, 256) for a non-arg/non-auto symbol (unscaled form).
// Fix: the first rule's alignment test was "(off1+off2)%2==8", which is never
// true (x%2 is 0 or 1), so aligned offsets outside (-256,256) were wrongly
// rejected. Corrected to "(off1+off2)%8==0" to match the second rule here and
// the MOVDload/MOVDstore rules.
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVDstorezero(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%8==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%8 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUload simplifies an unsigned halfword load:
// it folds an (ADDconst [off2] ptr) base or a mergeable (MOVDaddr) base into
// the load's offset/symbol, but only when the combined offset stays
// encodable — either 2-byte aligned (scaled-offset form) or within
// (-256, 256) for a non-arg/non-auto symbol (unscaled form). A load that
// reads the halfword just zeroed by a matching MOVHstorezero becomes
// (MOVDconst [0]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVHUload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHUreg removes redundant zero-extensions of a
// halfword: arguments produced by MOVBUload, MOVHUload, MOVBUreg, or MOVHUreg
// already have their upper bits clear, so the op degrades to a plain
// (MOVDreg x); a constant argument is folded to its low 16 bits via
// int64(uint16(c)).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVHUreg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVHload simplifies a signed halfword load:
// it folds an (ADDconst [off2] ptr) base or a mergeable (MOVDaddr) base into
// the load's offset/symbol, but only when the combined offset stays
// encodable — either 2-byte aligned (scaled-offset form) or within
// (-256, 256) for a non-arg/non-auto symbol (unscaled form). A load that
// reads the halfword just zeroed by a matching MOVHstorezero becomes
// (MOVDconst [0]).
// It reports whether v was rewritten. Generated code — edit gen/ARM64.rules instead.
func rewriteValueARM64_OpARM64MOVHload(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: (off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym) && !isAuto(sym)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym) && !isAuto(sym)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && ((off1+off2)%2==0 || off1+off2<256 && off1+off2>-256 && !isArg(sym1) && !isAuto(sym1))
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && ((off1+off2)%2 == 0 || off1+off2 < 256 && off1+off2 > -256 && !isArg(sym1) && !isAuto(sym1))) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MOVHreg(v *Value, config *Config) bool {
b := v.Block
_ = b
// match: (MOVHreg x:(MOVBload _ _))
// cond:
// result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpARM64MOVBload {
break
}
v.reset(OpARM64MOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVBUload _ _))
// cond:
// result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpARM64MOVBUload {
break
}