| // autogenerated from gen/ARM.rules: do not edit! |
| // generated with: cd gen; go run *.go |
| |
| package ssa |
| |
| import "math" |
| |
| var _ = math.MinInt8 // in case not otherwise used |
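// rewriteValueARM dispatches on v.Op to the per-opcode rewrite function
// generated from the ARM.rules file. It reports whether v was rewritten.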
| func rewriteValueARM(v *Value, config *Config) bool { |
| switch v.Op { |
| case OpARMADC: |
| return rewriteValueARM_OpARMADC(v, config) |
| case OpARMADCconst: |
| return rewriteValueARM_OpARMADCconst(v, config) |
| case OpARMADCshiftLL: |
| return rewriteValueARM_OpARMADCshiftLL(v, config) |
| case OpARMADCshiftLLreg: |
| return rewriteValueARM_OpARMADCshiftLLreg(v, config) |
| case OpARMADCshiftRA: |
| return rewriteValueARM_OpARMADCshiftRA(v, config) |
| case OpARMADCshiftRAreg: |
| return rewriteValueARM_OpARMADCshiftRAreg(v, config) |
| case OpARMADCshiftRL: |
| return rewriteValueARM_OpARMADCshiftRL(v, config) |
| case OpARMADCshiftRLreg: |
| return rewriteValueARM_OpARMADCshiftRLreg(v, config) |
| case OpARMADD: |
| return rewriteValueARM_OpARMADD(v, config) |
| case OpARMADDS: |
| return rewriteValueARM_OpARMADDS(v, config) |
| case OpARMADDSshiftLL: |
| return rewriteValueARM_OpARMADDSshiftLL(v, config) |
| case OpARMADDSshiftLLreg: |
| return rewriteValueARM_OpARMADDSshiftLLreg(v, config) |
| case OpARMADDSshiftRA: |
| return rewriteValueARM_OpARMADDSshiftRA(v, config) |
| case OpARMADDSshiftRAreg: |
| return rewriteValueARM_OpARMADDSshiftRAreg(v, config) |
| case OpARMADDSshiftRL: |
| return rewriteValueARM_OpARMADDSshiftRL(v, config) |
| case OpARMADDSshiftRLreg: |
| return rewriteValueARM_OpARMADDSshiftRLreg(v, config) |
| case OpARMADDconst: |
| return rewriteValueARM_OpARMADDconst(v, config) |
| case OpARMADDshiftLL: |
| return rewriteValueARM_OpARMADDshiftLL(v, config) |
| case OpARMADDshiftLLreg: |
| return rewriteValueARM_OpARMADDshiftLLreg(v, config) |
| case OpARMADDshiftRA: |
| return rewriteValueARM_OpARMADDshiftRA(v, config) |
| case OpARMADDshiftRAreg: |
| return rewriteValueARM_OpARMADDshiftRAreg(v, config) |
| case OpARMADDshiftRL: |
| return rewriteValueARM_OpARMADDshiftRL(v, config) |
| case OpARMADDshiftRLreg: |
| return rewriteValueARM_OpARMADDshiftRLreg(v, config) |
| case OpARMAND: |
| return rewriteValueARM_OpARMAND(v, config) |
| case OpARMANDconst: |
| return rewriteValueARM_OpARMANDconst(v, config) |
| case OpARMANDshiftLL: |
| return rewriteValueARM_OpARMANDshiftLL(v, config) |
| case OpARMANDshiftLLreg: |
| return rewriteValueARM_OpARMANDshiftLLreg(v, config) |
| case OpARMANDshiftRA: |
| return rewriteValueARM_OpARMANDshiftRA(v, config) |
| case OpARMANDshiftRAreg: |
| return rewriteValueARM_OpARMANDshiftRAreg(v, config) |
| case OpARMANDshiftRL: |
| return rewriteValueARM_OpARMANDshiftRL(v, config) |
| case OpARMANDshiftRLreg: |
| return rewriteValueARM_OpARMANDshiftRLreg(v, config) |
| case OpARMBIC: |
| return rewriteValueARM_OpARMBIC(v, config) |
| case OpARMBICconst: |
| return rewriteValueARM_OpARMBICconst(v, config) |
| case OpARMBICshiftLL: |
| return rewriteValueARM_OpARMBICshiftLL(v, config) |
| case OpARMBICshiftLLreg: |
| return rewriteValueARM_OpARMBICshiftLLreg(v, config) |
| case OpARMBICshiftRA: |
| return rewriteValueARM_OpARMBICshiftRA(v, config) |
| case OpARMBICshiftRAreg: |
| return rewriteValueARM_OpARMBICshiftRAreg(v, config) |
| case OpARMBICshiftRL: |
| return rewriteValueARM_OpARMBICshiftRL(v, config) |
| case OpARMBICshiftRLreg: |
| return rewriteValueARM_OpARMBICshiftRLreg(v, config) |
| case OpARMCMOVWHSconst: |
| return rewriteValueARM_OpARMCMOVWHSconst(v, config) |
| case OpARMCMOVWLSconst: |
| return rewriteValueARM_OpARMCMOVWLSconst(v, config) |
| case OpARMCMP: |
| return rewriteValueARM_OpARMCMP(v, config) |
| case OpARMCMPD: |
| return rewriteValueARM_OpARMCMPD(v, config) |
| case OpARMCMPF: |
| return rewriteValueARM_OpARMCMPF(v, config) |
| case OpARMCMPconst: |
| return rewriteValueARM_OpARMCMPconst(v, config) |
| case OpARMCMPshiftLL: |
| return rewriteValueARM_OpARMCMPshiftLL(v, config) |
| case OpARMCMPshiftLLreg: |
| return rewriteValueARM_OpARMCMPshiftLLreg(v, config) |
| case OpARMCMPshiftRA: |
| return rewriteValueARM_OpARMCMPshiftRA(v, config) |
| case OpARMCMPshiftRAreg: |
| return rewriteValueARM_OpARMCMPshiftRAreg(v, config) |
| case OpARMCMPshiftRL: |
| return rewriteValueARM_OpARMCMPshiftRL(v, config) |
| case OpARMCMPshiftRLreg: |
| return rewriteValueARM_OpARMCMPshiftRLreg(v, config) |
| case OpARMEqual: |
| return rewriteValueARM_OpARMEqual(v, config) |
| case OpARMGreaterEqual: |
| return rewriteValueARM_OpARMGreaterEqual(v, config) |
| case OpARMGreaterEqualU: |
| return rewriteValueARM_OpARMGreaterEqualU(v, config) |
| case OpARMGreaterThan: |
| return rewriteValueARM_OpARMGreaterThan(v, config) |
| case OpARMGreaterThanU: |
| return rewriteValueARM_OpARMGreaterThanU(v, config) |
| case OpARMLessEqual: |
| return rewriteValueARM_OpARMLessEqual(v, config) |
| case OpARMLessEqualU: |
| return rewriteValueARM_OpARMLessEqualU(v, config) |
| case OpARMLessThan: |
| return rewriteValueARM_OpARMLessThan(v, config) |
| case OpARMLessThanU: |
| return rewriteValueARM_OpARMLessThanU(v, config) |
| case OpARMMOVBUload: |
| return rewriteValueARM_OpARMMOVBUload(v, config) |
| case OpARMMOVBUreg: |
| return rewriteValueARM_OpARMMOVBUreg(v, config) |
| case OpARMMOVBload: |
| return rewriteValueARM_OpARMMOVBload(v, config) |
| case OpARMMOVBreg: |
| return rewriteValueARM_OpARMMOVBreg(v, config) |
| case OpARMMOVBstore: |
| return rewriteValueARM_OpARMMOVBstore(v, config) |
| case OpARMMOVDload: |
| return rewriteValueARM_OpARMMOVDload(v, config) |
| case OpARMMOVDstore: |
| return rewriteValueARM_OpARMMOVDstore(v, config) |
| case OpARMMOVFload: |
| return rewriteValueARM_OpARMMOVFload(v, config) |
| case OpARMMOVFstore: |
| return rewriteValueARM_OpARMMOVFstore(v, config) |
| case OpARMMOVHUload: |
| return rewriteValueARM_OpARMMOVHUload(v, config) |
| case OpARMMOVHUreg: |
| return rewriteValueARM_OpARMMOVHUreg(v, config) |
| case OpARMMOVHload: |
| return rewriteValueARM_OpARMMOVHload(v, config) |
| case OpARMMOVHreg: |
| return rewriteValueARM_OpARMMOVHreg(v, config) |
| case OpARMMOVHstore: |
| return rewriteValueARM_OpARMMOVHstore(v, config) |
| case OpARMMOVWload: |
| return rewriteValueARM_OpARMMOVWload(v, config) |
| case OpARMMOVWloadidx: |
| return rewriteValueARM_OpARMMOVWloadidx(v, config) |
| case OpARMMOVWloadshiftLL: |
| return rewriteValueARM_OpARMMOVWloadshiftLL(v, config) |
| case OpARMMOVWloadshiftRA: |
| return rewriteValueARM_OpARMMOVWloadshiftRA(v, config) |
| case OpARMMOVWloadshiftRL: |
| return rewriteValueARM_OpARMMOVWloadshiftRL(v, config) |
| case OpARMMOVWreg: |
| return rewriteValueARM_OpARMMOVWreg(v, config) |
| case OpARMMOVWstore: |
| return rewriteValueARM_OpARMMOVWstore(v, config) |
| case OpARMMOVWstoreidx: |
| return rewriteValueARM_OpARMMOVWstoreidx(v, config) |
| case OpARMMOVWstoreshiftLL: |
| return rewriteValueARM_OpARMMOVWstoreshiftLL(v, config) |
| case OpARMMOVWstoreshiftRA: |
| return rewriteValueARM_OpARMMOVWstoreshiftRA(v, config) |
| case OpARMMOVWstoreshiftRL: |
| return rewriteValueARM_OpARMMOVWstoreshiftRL(v, config) |
| case OpARMMUL: |
| return rewriteValueARM_OpARMMUL(v, config) |
| case OpARMMULA: |
| return rewriteValueARM_OpARMMULA(v, config) |
| case OpARMMVN: |
| return rewriteValueARM_OpARMMVN(v, config) |
| case OpARMMVNshiftLL: |
| return rewriteValueARM_OpARMMVNshiftLL(v, config) |
| case OpARMMVNshiftLLreg: |
| return rewriteValueARM_OpARMMVNshiftLLreg(v, config) |
| case OpARMMVNshiftRA: |
| return rewriteValueARM_OpARMMVNshiftRA(v, config) |
| case OpARMMVNshiftRAreg: |
| return rewriteValueARM_OpARMMVNshiftRAreg(v, config) |
| case OpARMMVNshiftRL: |
| return rewriteValueARM_OpARMMVNshiftRL(v, config) |
| case OpARMMVNshiftRLreg: |
| return rewriteValueARM_OpARMMVNshiftRLreg(v, config) |
| case OpARMNotEqual: |
| return rewriteValueARM_OpARMNotEqual(v, config) |
| case OpARMOR: |
| return rewriteValueARM_OpARMOR(v, config) |
| case OpARMORconst: |
| return rewriteValueARM_OpARMORconst(v, config) |
| case OpARMORshiftLL: |
| return rewriteValueARM_OpARMORshiftLL(v, config) |
| case OpARMORshiftLLreg: |
| return rewriteValueARM_OpARMORshiftLLreg(v, config) |
| case OpARMORshiftRA: |
| return rewriteValueARM_OpARMORshiftRA(v, config) |
| case OpARMORshiftRAreg: |
| return rewriteValueARM_OpARMORshiftRAreg(v, config) |
| case OpARMORshiftRL: |
| return rewriteValueARM_OpARMORshiftRL(v, config) |
| case OpARMORshiftRLreg: |
| return rewriteValueARM_OpARMORshiftRLreg(v, config) |
| case OpARMRSB: |
| return rewriteValueARM_OpARMRSB(v, config) |
| case OpARMRSBSshiftLL: |
| return rewriteValueARM_OpARMRSBSshiftLL(v, config) |
| case OpARMRSBSshiftLLreg: |
| return rewriteValueARM_OpARMRSBSshiftLLreg(v, config) |
| case OpARMRSBSshiftRA: |
| return rewriteValueARM_OpARMRSBSshiftRA(v, config) |
| case OpARMRSBSshiftRAreg: |
| return rewriteValueARM_OpARMRSBSshiftRAreg(v, config) |
| case OpARMRSBSshiftRL: |
| return rewriteValueARM_OpARMRSBSshiftRL(v, config) |
| case OpARMRSBSshiftRLreg: |
| return rewriteValueARM_OpARMRSBSshiftRLreg(v, config) |
| case OpARMRSBconst: |
| return rewriteValueARM_OpARMRSBconst(v, config) |
| case OpARMRSBshiftLL: |
| return rewriteValueARM_OpARMRSBshiftLL(v, config) |
| case OpARMRSBshiftLLreg: |
| return rewriteValueARM_OpARMRSBshiftLLreg(v, config) |
| case OpARMRSBshiftRA: |
| return rewriteValueARM_OpARMRSBshiftRA(v, config) |
| case OpARMRSBshiftRAreg: |
| return rewriteValueARM_OpARMRSBshiftRAreg(v, config) |
| case OpARMRSBshiftRL: |
| return rewriteValueARM_OpARMRSBshiftRL(v, config) |
| case OpARMRSBshiftRLreg: |
| return rewriteValueARM_OpARMRSBshiftRLreg(v, config) |
| case OpARMRSCconst: |
| return rewriteValueARM_OpARMRSCconst(v, config) |
| case OpARMRSCshiftLL: |
| return rewriteValueARM_OpARMRSCshiftLL(v, config) |
| case OpARMRSCshiftLLreg: |
| return rewriteValueARM_OpARMRSCshiftLLreg(v, config) |
| case OpARMRSCshiftRA: |
| return rewriteValueARM_OpARMRSCshiftRA(v, config) |
| case OpARMRSCshiftRAreg: |
| return rewriteValueARM_OpARMRSCshiftRAreg(v, config) |
| case OpARMRSCshiftRL: |
| return rewriteValueARM_OpARMRSCshiftRL(v, config) |
| case OpARMRSCshiftRLreg: |
| return rewriteValueARM_OpARMRSCshiftRLreg(v, config) |
| case OpARMSBC: |
| return rewriteValueARM_OpARMSBC(v, config) |
| case OpARMSBCconst: |
| return rewriteValueARM_OpARMSBCconst(v, config) |
| case OpARMSBCshiftLL: |
| return rewriteValueARM_OpARMSBCshiftLL(v, config) |
| case OpARMSBCshiftLLreg: |
| return rewriteValueARM_OpARMSBCshiftLLreg(v, config) |
| case OpARMSBCshiftRA: |
| return rewriteValueARM_OpARMSBCshiftRA(v, config) |
| case OpARMSBCshiftRAreg: |
| return rewriteValueARM_OpARMSBCshiftRAreg(v, config) |
| case OpARMSBCshiftRL: |
| return rewriteValueARM_OpARMSBCshiftRL(v, config) |
| case OpARMSBCshiftRLreg: |
| return rewriteValueARM_OpARMSBCshiftRLreg(v, config) |
| case OpARMSLL: |
| return rewriteValueARM_OpARMSLL(v, config) |
| case OpARMSLLconst: |
| return rewriteValueARM_OpARMSLLconst(v, config) |
| case OpARMSRA: |
| return rewriteValueARM_OpARMSRA(v, config) |
| case OpARMSRAcond: |
| return rewriteValueARM_OpARMSRAcond(v, config) |
| case OpARMSRAconst: |
| return rewriteValueARM_OpARMSRAconst(v, config) |
| case OpARMSRL: |
| return rewriteValueARM_OpARMSRL(v, config) |
| case OpARMSRLconst: |
| return rewriteValueARM_OpARMSRLconst(v, config) |
| case OpARMSUB: |
| return rewriteValueARM_OpARMSUB(v, config) |
| case OpARMSUBS: |
| return rewriteValueARM_OpARMSUBS(v, config) |
| case OpARMSUBSshiftLL: |
| return rewriteValueARM_OpARMSUBSshiftLL(v, config) |
| case OpARMSUBSshiftLLreg: |
| return rewriteValueARM_OpARMSUBSshiftLLreg(v, config) |
| case OpARMSUBSshiftRA: |
| return rewriteValueARM_OpARMSUBSshiftRA(v, config) |
| case OpARMSUBSshiftRAreg: |
| return rewriteValueARM_OpARMSUBSshiftRAreg(v, config) |
| case OpARMSUBSshiftRL: |
| return rewriteValueARM_OpARMSUBSshiftRL(v, config) |
| case OpARMSUBSshiftRLreg: |
| return rewriteValueARM_OpARMSUBSshiftRLreg(v, config) |
| case OpARMSUBconst: |
| return rewriteValueARM_OpARMSUBconst(v, config) |
| case OpARMSUBshiftLL: |
| return rewriteValueARM_OpARMSUBshiftLL(v, config) |
| case OpARMSUBshiftLLreg: |
| return rewriteValueARM_OpARMSUBshiftLLreg(v, config) |
| case OpARMSUBshiftRA: |
| return rewriteValueARM_OpARMSUBshiftRA(v, config) |
| case OpARMSUBshiftRAreg: |
| return rewriteValueARM_OpARMSUBshiftRAreg(v, config) |
| case OpARMSUBshiftRL: |
| return rewriteValueARM_OpARMSUBshiftRL(v, config) |
| case OpARMSUBshiftRLreg: |
| return rewriteValueARM_OpARMSUBshiftRLreg(v, config) |
| case OpARMXOR: |
| return rewriteValueARM_OpARMXOR(v, config) |
| case OpARMXORconst: |
| return rewriteValueARM_OpARMXORconst(v, config) |
| case OpARMXORshiftLL: |
| return rewriteValueARM_OpARMXORshiftLL(v, config) |
| case OpARMXORshiftLLreg: |
| return rewriteValueARM_OpARMXORshiftLLreg(v, config) |
| case OpARMXORshiftRA: |
| return rewriteValueARM_OpARMXORshiftRA(v, config) |
| case OpARMXORshiftRAreg: |
| return rewriteValueARM_OpARMXORshiftRAreg(v, config) |
| case OpARMXORshiftRL: |
| return rewriteValueARM_OpARMXORshiftRL(v, config) |
| case OpARMXORshiftRLreg: |
| return rewriteValueARM_OpARMXORshiftRLreg(v, config) |
| case OpARMXORshiftRR: |
| return rewriteValueARM_OpARMXORshiftRR(v, config) |
| case OpAdd16: |
| return rewriteValueARM_OpAdd16(v, config) |
| case OpAdd32: |
| return rewriteValueARM_OpAdd32(v, config) |
| case OpAdd32F: |
| return rewriteValueARM_OpAdd32F(v, config) |
| case OpAdd32carry: |
| return rewriteValueARM_OpAdd32carry(v, config) |
| case OpAdd32withcarry: |
| return rewriteValueARM_OpAdd32withcarry(v, config) |
| case OpAdd64F: |
| return rewriteValueARM_OpAdd64F(v, config) |
| case OpAdd8: |
| return rewriteValueARM_OpAdd8(v, config) |
| case OpAddPtr: |
| return rewriteValueARM_OpAddPtr(v, config) |
| case OpAddr: |
| return rewriteValueARM_OpAddr(v, config) |
| case OpAnd16: |
| return rewriteValueARM_OpAnd16(v, config) |
| case OpAnd32: |
| return rewriteValueARM_OpAnd32(v, config) |
| case OpAnd8: |
| return rewriteValueARM_OpAnd8(v, config) |
| case OpAndB: |
| return rewriteValueARM_OpAndB(v, config) |
| case OpBswap32: |
| return rewriteValueARM_OpBswap32(v, config) |
| case OpClosureCall: |
| return rewriteValueARM_OpClosureCall(v, config) |
| case OpCom16: |
| return rewriteValueARM_OpCom16(v, config) |
| case OpCom32: |
| return rewriteValueARM_OpCom32(v, config) |
| case OpCom8: |
| return rewriteValueARM_OpCom8(v, config) |
| case OpConst16: |
| return rewriteValueARM_OpConst16(v, config) |
| case OpConst32: |
| return rewriteValueARM_OpConst32(v, config) |
| case OpConst32F: |
| return rewriteValueARM_OpConst32F(v, config) |
| case OpConst64F: |
| return rewriteValueARM_OpConst64F(v, config) |
| case OpConst8: |
| return rewriteValueARM_OpConst8(v, config) |
| case OpConstBool: |
| return rewriteValueARM_OpConstBool(v, config) |
| case OpConstNil: |
| return rewriteValueARM_OpConstNil(v, config) |
| case OpConvert: |
| return rewriteValueARM_OpConvert(v, config) |
| case OpCtz32: |
| return rewriteValueARM_OpCtz32(v, config) |
| case OpCvt32Fto32: |
| return rewriteValueARM_OpCvt32Fto32(v, config) |
| case OpCvt32Fto32U: |
| return rewriteValueARM_OpCvt32Fto32U(v, config) |
| case OpCvt32Fto64F: |
| return rewriteValueARM_OpCvt32Fto64F(v, config) |
| case OpCvt32Uto32F: |
| return rewriteValueARM_OpCvt32Uto32F(v, config) |
| case OpCvt32Uto64F: |
| return rewriteValueARM_OpCvt32Uto64F(v, config) |
| case OpCvt32to32F: |
| return rewriteValueARM_OpCvt32to32F(v, config) |
| case OpCvt32to64F: |
| return rewriteValueARM_OpCvt32to64F(v, config) |
| case OpCvt64Fto32: |
| return rewriteValueARM_OpCvt64Fto32(v, config) |
| case OpCvt64Fto32F: |
| return rewriteValueARM_OpCvt64Fto32F(v, config) |
| case OpCvt64Fto32U: |
| return rewriteValueARM_OpCvt64Fto32U(v, config) |
| case OpDeferCall: |
| return rewriteValueARM_OpDeferCall(v, config) |
| case OpDiv16: |
| return rewriteValueARM_OpDiv16(v, config) |
| case OpDiv16u: |
| return rewriteValueARM_OpDiv16u(v, config) |
| case OpDiv32: |
| return rewriteValueARM_OpDiv32(v, config) |
| case OpDiv32F: |
| return rewriteValueARM_OpDiv32F(v, config) |
| case OpDiv32u: |
| return rewriteValueARM_OpDiv32u(v, config) |
| case OpDiv64F: |
| return rewriteValueARM_OpDiv64F(v, config) |
| case OpDiv8: |
| return rewriteValueARM_OpDiv8(v, config) |
| case OpDiv8u: |
| return rewriteValueARM_OpDiv8u(v, config) |
| case OpEq16: |
| return rewriteValueARM_OpEq16(v, config) |
| case OpEq32: |
| return rewriteValueARM_OpEq32(v, config) |
| case OpEq32F: |
| return rewriteValueARM_OpEq32F(v, config) |
| case OpEq64F: |
| return rewriteValueARM_OpEq64F(v, config) |
| case OpEq8: |
| return rewriteValueARM_OpEq8(v, config) |
| case OpEqB: |
| return rewriteValueARM_OpEqB(v, config) |
| case OpEqPtr: |
| return rewriteValueARM_OpEqPtr(v, config) |
| case OpGeq16: |
| return rewriteValueARM_OpGeq16(v, config) |
| case OpGeq16U: |
| return rewriteValueARM_OpGeq16U(v, config) |
| case OpGeq32: |
| return rewriteValueARM_OpGeq32(v, config) |
| case OpGeq32F: |
| return rewriteValueARM_OpGeq32F(v, config) |
| case OpGeq32U: |
| return rewriteValueARM_OpGeq32U(v, config) |
| case OpGeq64F: |
| return rewriteValueARM_OpGeq64F(v, config) |
| case OpGeq8: |
| return rewriteValueARM_OpGeq8(v, config) |
| case OpGeq8U: |
| return rewriteValueARM_OpGeq8U(v, config) |
| case OpGetClosurePtr: |
| return rewriteValueARM_OpGetClosurePtr(v, config) |
| case OpGoCall: |
| return rewriteValueARM_OpGoCall(v, config) |
| case OpGreater16: |
| return rewriteValueARM_OpGreater16(v, config) |
| case OpGreater16U: |
| return rewriteValueARM_OpGreater16U(v, config) |
| case OpGreater32: |
| return rewriteValueARM_OpGreater32(v, config) |
| case OpGreater32F: |
| return rewriteValueARM_OpGreater32F(v, config) |
| case OpGreater32U: |
| return rewriteValueARM_OpGreater32U(v, config) |
| case OpGreater64F: |
| return rewriteValueARM_OpGreater64F(v, config) |
| case OpGreater8: |
| return rewriteValueARM_OpGreater8(v, config) |
| case OpGreater8U: |
| return rewriteValueARM_OpGreater8U(v, config) |
| case OpHmul16: |
| return rewriteValueARM_OpHmul16(v, config) |
| case OpHmul16u: |
| return rewriteValueARM_OpHmul16u(v, config) |
| case OpHmul32: |
| return rewriteValueARM_OpHmul32(v, config) |
| case OpHmul32u: |
| return rewriteValueARM_OpHmul32u(v, config) |
| case OpHmul8: |
| return rewriteValueARM_OpHmul8(v, config) |
| case OpHmul8u: |
| return rewriteValueARM_OpHmul8u(v, config) |
| case OpInterCall: |
| return rewriteValueARM_OpInterCall(v, config) |
| case OpIsInBounds: |
| return rewriteValueARM_OpIsInBounds(v, config) |
| case OpIsNonNil: |
| return rewriteValueARM_OpIsNonNil(v, config) |
| case OpIsSliceInBounds: |
| return rewriteValueARM_OpIsSliceInBounds(v, config) |
| case OpLeq16: |
| return rewriteValueARM_OpLeq16(v, config) |
| case OpLeq16U: |
| return rewriteValueARM_OpLeq16U(v, config) |
| case OpLeq32: |
| return rewriteValueARM_OpLeq32(v, config) |
| case OpLeq32F: |
| return rewriteValueARM_OpLeq32F(v, config) |
| case OpLeq32U: |
| return rewriteValueARM_OpLeq32U(v, config) |
| case OpLeq64F: |
| return rewriteValueARM_OpLeq64F(v, config) |
| case OpLeq8: |
| return rewriteValueARM_OpLeq8(v, config) |
| case OpLeq8U: |
| return rewriteValueARM_OpLeq8U(v, config) |
| case OpLess16: |
| return rewriteValueARM_OpLess16(v, config) |
| case OpLess16U: |
| return rewriteValueARM_OpLess16U(v, config) |
| case OpLess32: |
| return rewriteValueARM_OpLess32(v, config) |
| case OpLess32F: |
| return rewriteValueARM_OpLess32F(v, config) |
| case OpLess32U: |
| return rewriteValueARM_OpLess32U(v, config) |
| case OpLess64F: |
| return rewriteValueARM_OpLess64F(v, config) |
| case OpLess8: |
| return rewriteValueARM_OpLess8(v, config) |
| case OpLess8U: |
| return rewriteValueARM_OpLess8U(v, config) |
| case OpLoad: |
| return rewriteValueARM_OpLoad(v, config) |
| case OpLrot16: |
| return rewriteValueARM_OpLrot16(v, config) |
| case OpLrot32: |
| return rewriteValueARM_OpLrot32(v, config) |
| case OpLrot8: |
| return rewriteValueARM_OpLrot8(v, config) |
| case OpLsh16x16: |
| return rewriteValueARM_OpLsh16x16(v, config) |
| case OpLsh16x32: |
| return rewriteValueARM_OpLsh16x32(v, config) |
| case OpLsh16x64: |
| return rewriteValueARM_OpLsh16x64(v, config) |
| case OpLsh16x8: |
| return rewriteValueARM_OpLsh16x8(v, config) |
| case OpLsh32x16: |
| return rewriteValueARM_OpLsh32x16(v, config) |
| case OpLsh32x32: |
| return rewriteValueARM_OpLsh32x32(v, config) |
| case OpLsh32x64: |
| return rewriteValueARM_OpLsh32x64(v, config) |
| case OpLsh32x8: |
| return rewriteValueARM_OpLsh32x8(v, config) |
| case OpLsh8x16: |
| return rewriteValueARM_OpLsh8x16(v, config) |
| case OpLsh8x32: |
| return rewriteValueARM_OpLsh8x32(v, config) |
| case OpLsh8x64: |
| return rewriteValueARM_OpLsh8x64(v, config) |
| case OpLsh8x8: |
| return rewriteValueARM_OpLsh8x8(v, config) |
| case OpMod16: |
| return rewriteValueARM_OpMod16(v, config) |
| case OpMod16u: |
| return rewriteValueARM_OpMod16u(v, config) |
| case OpMod32: |
| return rewriteValueARM_OpMod32(v, config) |
| case OpMod32u: |
| return rewriteValueARM_OpMod32u(v, config) |
| case OpMod8: |
| return rewriteValueARM_OpMod8(v, config) |
| case OpMod8u: |
| return rewriteValueARM_OpMod8u(v, config) |
| case OpMove: |
| return rewriteValueARM_OpMove(v, config) |
| case OpMul16: |
| return rewriteValueARM_OpMul16(v, config) |
| case OpMul32: |
| return rewriteValueARM_OpMul32(v, config) |
| case OpMul32F: |
| return rewriteValueARM_OpMul32F(v, config) |
| case OpMul32uhilo: |
| return rewriteValueARM_OpMul32uhilo(v, config) |
| case OpMul64F: |
| return rewriteValueARM_OpMul64F(v, config) |
| case OpMul8: |
| return rewriteValueARM_OpMul8(v, config) |
| case OpNeg16: |
| return rewriteValueARM_OpNeg16(v, config) |
| case OpNeg32: |
| return rewriteValueARM_OpNeg32(v, config) |
| case OpNeg32F: |
| return rewriteValueARM_OpNeg32F(v, config) |
| case OpNeg64F: |
| return rewriteValueARM_OpNeg64F(v, config) |
| case OpNeg8: |
| return rewriteValueARM_OpNeg8(v, config) |
| case OpNeq16: |
| return rewriteValueARM_OpNeq16(v, config) |
| case OpNeq32: |
| return rewriteValueARM_OpNeq32(v, config) |
| case OpNeq32F: |
| return rewriteValueARM_OpNeq32F(v, config) |
| case OpNeq64F: |
| return rewriteValueARM_OpNeq64F(v, config) |
| case OpNeq8: |
| return rewriteValueARM_OpNeq8(v, config) |
| case OpNeqB: |
| return rewriteValueARM_OpNeqB(v, config) |
| case OpNeqPtr: |
| return rewriteValueARM_OpNeqPtr(v, config) |
| case OpNilCheck: |
| return rewriteValueARM_OpNilCheck(v, config) |
| case OpNot: |
| return rewriteValueARM_OpNot(v, config) |
| case OpOffPtr: |
| return rewriteValueARM_OpOffPtr(v, config) |
| case OpOr16: |
| return rewriteValueARM_OpOr16(v, config) |
| case OpOr32: |
| return rewriteValueARM_OpOr32(v, config) |
| case OpOr8: |
| return rewriteValueARM_OpOr8(v, config) |
| case OpOrB: |
| return rewriteValueARM_OpOrB(v, config) |
| case OpRsh16Ux16: |
| return rewriteValueARM_OpRsh16Ux16(v, config) |
| case OpRsh16Ux32: |
| return rewriteValueARM_OpRsh16Ux32(v, config) |
| case OpRsh16Ux64: |
| return rewriteValueARM_OpRsh16Ux64(v, config) |
| case OpRsh16Ux8: |
| return rewriteValueARM_OpRsh16Ux8(v, config) |
| case OpRsh16x16: |
| return rewriteValueARM_OpRsh16x16(v, config) |
| case OpRsh16x32: |
| return rewriteValueARM_OpRsh16x32(v, config) |
| case OpRsh16x64: |
| return rewriteValueARM_OpRsh16x64(v, config) |
| case OpRsh16x8: |
| return rewriteValueARM_OpRsh16x8(v, config) |
| case OpRsh32Ux16: |
| return rewriteValueARM_OpRsh32Ux16(v, config) |
| case OpRsh32Ux32: |
| return rewriteValueARM_OpRsh32Ux32(v, config) |
| case OpRsh32Ux64: |
| return rewriteValueARM_OpRsh32Ux64(v, config) |
| case OpRsh32Ux8: |
| return rewriteValueARM_OpRsh32Ux8(v, config) |
| case OpRsh32x16: |
| return rewriteValueARM_OpRsh32x16(v, config) |
| case OpRsh32x32: |
| return rewriteValueARM_OpRsh32x32(v, config) |
| case OpRsh32x64: |
| return rewriteValueARM_OpRsh32x64(v, config) |
| case OpRsh32x8: |
| return rewriteValueARM_OpRsh32x8(v, config) |
| case OpRsh8Ux16: |
| return rewriteValueARM_OpRsh8Ux16(v, config) |
| case OpRsh8Ux32: |
| return rewriteValueARM_OpRsh8Ux32(v, config) |
| case OpRsh8Ux64: |
| return rewriteValueARM_OpRsh8Ux64(v, config) |
| case OpRsh8Ux8: |
| return rewriteValueARM_OpRsh8Ux8(v, config) |
| case OpRsh8x16: |
| return rewriteValueARM_OpRsh8x16(v, config) |
| case OpRsh8x32: |
| return rewriteValueARM_OpRsh8x32(v, config) |
| case OpRsh8x64: |
| return rewriteValueARM_OpRsh8x64(v, config) |
| case OpRsh8x8: |
| return rewriteValueARM_OpRsh8x8(v, config) |
| case OpSelect0: |
| return rewriteValueARM_OpSelect0(v, config) |
| case OpSelect1: |
| return rewriteValueARM_OpSelect1(v, config) |
| case OpSignExt16to32: |
| return rewriteValueARM_OpSignExt16to32(v, config) |
| case OpSignExt8to16: |
| return rewriteValueARM_OpSignExt8to16(v, config) |
| case OpSignExt8to32: |
| return rewriteValueARM_OpSignExt8to32(v, config) |
| case OpSignmask: |
| return rewriteValueARM_OpSignmask(v, config) |
| case OpSlicemask: |
| return rewriteValueARM_OpSlicemask(v, config) |
| case OpSqrt: |
| return rewriteValueARM_OpSqrt(v, config) |
| case OpStaticCall: |
| return rewriteValueARM_OpStaticCall(v, config) |
| case OpStore: |
| return rewriteValueARM_OpStore(v, config) |
| case OpSub16: |
| return rewriteValueARM_OpSub16(v, config) |
| case OpSub32: |
| return rewriteValueARM_OpSub32(v, config) |
| case OpSub32F: |
| return rewriteValueARM_OpSub32F(v, config) |
| case OpSub32carry: |
| return rewriteValueARM_OpSub32carry(v, config) |
| case OpSub32withcarry: |
| return rewriteValueARM_OpSub32withcarry(v, config) |
| case OpSub64F: |
| return rewriteValueARM_OpSub64F(v, config) |
| case OpSub8: |
| return rewriteValueARM_OpSub8(v, config) |
| case OpSubPtr: |
| return rewriteValueARM_OpSubPtr(v, config) |
| case OpTrunc16to8: |
| return rewriteValueARM_OpTrunc16to8(v, config) |
| case OpTrunc32to16: |
| return rewriteValueARM_OpTrunc32to16(v, config) |
| case OpTrunc32to8: |
| return rewriteValueARM_OpTrunc32to8(v, config) |
| case OpXor16: |
| return rewriteValueARM_OpXor16(v, config) |
| case OpXor32: |
| return rewriteValueARM_OpXor32(v, config) |
| case OpXor8: |
| return rewriteValueARM_OpXor8(v, config) |
| case OpZero: |
| return rewriteValueARM_OpZero(v, config) |
| case OpZeroExt16to32: |
| return rewriteValueARM_OpZeroExt16to32(v, config) |
| case OpZeroExt8to16: |
| return rewriteValueARM_OpZeroExt8to16(v, config) |
| case OpZeroExt8to32: |
| return rewriteValueARM_OpZeroExt8to32(v, config) |
| case OpZeromask: |
| return rewriteValueARM_OpZeromask(v, config) |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADC(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADC (MOVWconst [c]) x flags) |
| // cond: |
| // result: (ADCconst [c] x flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (MOVWconst [c]) flags) |
| // cond: |
| // result: (ADCconst [c] x flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SLLconst [c] y) flags) |
| // cond: |
| // result: (ADCshiftLL x y [c] flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SLLconst [c] y) x flags) |
| // cond: |
| // result: (ADCshiftLL x y [c] flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SRLconst [c] y) flags) |
| // cond: |
| // result: (ADCshiftRL x y [c] flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SRLconst [c] y) x flags) |
| // cond: |
| // result: (ADCshiftRL x y [c] flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SRAconst [c] y) flags) |
| // cond: |
| // result: (ADCshiftRA x y [c] flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SRAconst [c] y) x flags) |
| // cond: |
| // result: (ADCshiftRA x y [c] flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SLL y z) flags) |
| // cond: |
| // result: (ADCshiftLLreg x y z flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SLL y z) x flags) |
| // cond: |
| // result: (ADCshiftLLreg x y z flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SRL y z) flags) |
| // cond: |
| // result: (ADCshiftRLreg x y z flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SRL y z) x flags) |
| // cond: |
| // result: (ADCshiftRLreg x y z flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC x (SRA y z) flags) |
| // cond: |
| // result: (ADCshiftRAreg x y z flags) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADC (SRA y z) x flags) |
| // cond: |
| // result: (ADCshiftRAreg x y z flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRA { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCconst [c] (ADDconst [d] x) flags) |
| // cond: |
| // result: (ADCconst [int64(int32(c+d))] x flags) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMADDconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| flags := v.Args[1] |
| v.reset(OpARMADCconst) |
| v.AuxInt = int64(int32(c + d)) |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCconst [c] (SUBconst [d] x) flags) |
| // cond: |
| // result: (ADCconst [int64(int32(c-d))] x flags) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSUBconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| flags := v.Args[1] |
| v.reset(OpARMADCconst) |
| v.AuxInt = int64(int32(c - d)) |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftLL (MOVWconst [c]) x [d] flags) |
| // cond: |
| // result: (ADCconst [c] (SLLconst <x.Type> x [d]) flags) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftLL x (MOVWconst [c]) [d] flags) |
| // cond: |
| // result: (ADCconst x [int64(uint32(c)<<uint64(d))] flags) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftLLreg (MOVWconst [c]) x y flags) |
| // cond: |
| // result: (ADCconst [c] (SLL <x.Type> x y) flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| flags := v.Args[3] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftLLreg x y (MOVWconst [c]) flags) |
| // cond: |
| // result: (ADCshiftLL x y [c] flags) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| flags := v.Args[3] |
| v.reset(OpARMADCshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftRA (MOVWconst [c]) x [d] flags) |
| // cond: |
| // result: (ADCconst [c] (SRAconst <x.Type> x [d]) flags) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRAconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftRA x (MOVWconst [c]) [d] flags) |
| // cond: |
| // result: (ADCconst x [int64(int32(c)>>uint64(d))] flags) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftRAreg (MOVWconst [c]) x y flags) |
| // cond: |
| // result: (ADCconst [c] (SRA <x.Type> x y) flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| flags := v.Args[3] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRA, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftRAreg x y (MOVWconst [c]) flags) |
| // cond: |
| // result: (ADCshiftRA x y [c] flags) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| flags := v.Args[3] |
| v.reset(OpARMADCshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftRL (MOVWconst [c]) x [d] flags) |
| // cond: |
| // result: (ADCconst [c] (SRLconst <x.Type> x [d]) flags) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftRL x (MOVWconst [c]) [d] flags) |
| // cond: |
| // result: (ADCconst x [int64(uint32(c)>>uint64(d))] flags) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| flags := v.Args[2] |
| v.reset(OpARMADCconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADCshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADCshiftRLreg (MOVWconst [c]) x y flags) |
| // cond: |
| // result: (ADCconst [c] (SRL <x.Type> x y) flags) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| flags := v.Args[3] |
| v.reset(OpARMADCconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| v.AddArg(flags) |
| return true |
| } |
| // match: (ADCshiftRLreg x y (MOVWconst [c]) flags) |
| // cond: |
| // result: (ADCshiftRL x y [c] flags) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| flags := v.Args[3] |
| v.reset(OpARMADCshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADD(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADD (MOVWconst [c]) x) |
| // cond: |
| // result: (ADDconst [c] x) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADD x (MOVWconst [c])) |
| // cond: |
| // result: (ADDconst [c] x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADD x (SLLconst [c] y)) |
| // cond: |
| // result: (ADDshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD (SLLconst [c] y) x) |
| // cond: |
| // result: (ADDshiftLL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD x (SRLconst [c] y)) |
| // cond: |
| // result: (ADDshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD (SRLconst [c] y) x) |
| // cond: |
| // result: (ADDshiftRL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD x (SRAconst [c] y)) |
| // cond: |
| // result: (ADDshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD (SRAconst [c] y) x) |
| // cond: |
| // result: (ADDshiftRA x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD x (SLL y z)) |
| // cond: |
| // result: (ADDshiftLLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD (SLL y z) x) |
| // cond: |
| // result: (ADDshiftLLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD x (SRL y z)) |
| // cond: |
| // result: (ADDshiftRLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD (SRL y z) x) |
| // cond: |
| // result: (ADDshiftRLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD x (SRA y z)) |
| // cond: |
| // result: (ADDshiftRAreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD (SRA y z) x) |
| // cond: |
| // result: (ADDshiftRAreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRA { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADD x (RSBconst [0] y)) |
| // cond: |
| // result: (SUB x y) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMRSBconst { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| y := v_1.Args[0] |
| v.reset(OpARMSUB) |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD (RSBconst [0] y) x) |
| // cond: |
| // result: (SUB x y) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMRSBconst { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMSUB) |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADD (MUL x y) a) |
| // cond: |
| // result: (MULA x y a) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMUL { |
| break |
| } |
| x := v_0.Args[0] |
| y := v_0.Args[1] |
| a := v.Args[1] |
| v.reset(OpARMMULA) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(a) |
| return true |
| } |
| // match: (ADD a (MUL x y)) |
| // cond: |
| // result: (MULA x y a) |
| for { |
| a := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMUL { |
| break |
| } |
| x := v_1.Args[0] |
| y := v_1.Args[1] |
| v.reset(OpARMMULA) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(a) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDS(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDS (MOVWconst [c]) x) |
| // cond: |
| // result: (ADDSconst [c] x) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADDS x (MOVWconst [c])) |
| // cond: |
| // result: (ADDSconst [c] x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADDS x (SLLconst [c] y)) |
| // cond: |
| // result: (ADDSshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDSshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS (SLLconst [c] y) x) |
| // cond: |
| // result: (ADDSshiftLL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS x (SRLconst [c] y)) |
| // cond: |
| // result: (ADDSshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDSshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS (SRLconst [c] y) x) |
| // cond: |
| // result: (ADDSshiftRL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS x (SRAconst [c] y)) |
| // cond: |
| // result: (ADDSshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMADDSshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS (SRAconst [c] y) x) |
| // cond: |
| // result: (ADDSshiftRA x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (ADDS x (SLL y z)) |
| // cond: |
| // result: (ADDSshiftLLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDSshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADDS (SLL y z) x) |
| // cond: |
| // result: (ADDSshiftLLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADDS x (SRL y z)) |
| // cond: |
| // result: (ADDSshiftRLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDSshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADDS (SRL y z) x) |
| // cond: |
| // result: (ADDSshiftRLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADDS x (SRA y z)) |
| // cond: |
| // result: (ADDSshiftRAreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMADDSshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (ADDS (SRA y z) x) |
| // cond: |
| // result: (ADDSshiftRAreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRA { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMADDSshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftLL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDSconst [c] (SLLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftLL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDSconst x [int64(uint32(c)<<uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDSconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftLLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDSconst [c] (SLL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftLLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDSshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDSshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftRA (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDSconst [c] (SRAconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRAconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftRA x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDSconst x [int64(int32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDSconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftRAreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDSconst [c] (SRA <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRA, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftRAreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDSshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDSshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftRL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDSconst [c] (SRLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftRL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDSconst x [int64(uint32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDSconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDSshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDSshiftRLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDSconst [c] (SRL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDSconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDSshiftRLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDSshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDSshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr)) |
| // cond: |
| // result: (MOVWaddr [off1+off2] {sym} ptr) |
| for { |
| off1 := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWaddr { |
| break |
| } |
| off2 := v_0.AuxInt |
| sym := v_0.Aux |
| ptr := v_0.Args[0] |
| v.reset(OpARMMOVWaddr) |
| v.AuxInt = off1 + off2 |
| v.Aux = sym |
| v.AddArg(ptr) |
| return true |
| } |
| // match: (ADDconst [0] x) |
| // cond: |
| // result: x |
| for { |
| if v.AuxInt != 0 { |
| break |
| } |
| x := v.Args[0] |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADDconst [c] (MOVWconst [d])) |
| // cond: |
| // result: (MOVWconst [int64(int32(c+d))]) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| d := v_0.AuxInt |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = int64(int32(c + d)) |
| return true |
| } |
| // match: (ADDconst [c] (ADDconst [d] x)) |
| // cond: |
| // result: (ADDconst [int64(int32(c+d))] x) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMADDconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| v.reset(OpARMADDconst) |
| v.AuxInt = int64(int32(c + d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADDconst [c] (SUBconst [d] x)) |
| // cond: |
| // result: (ADDconst [int64(int32(c-d))] x) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSUBconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| v.reset(OpARMADDconst) |
| v.AuxInt = int64(int32(c - d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (ADDconst [c] (RSBconst [d] x)) |
| // cond: |
| // result: (RSBconst [int64(int32(c+d))] x) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMRSBconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| v.reset(OpARMRSBconst) |
| v.AuxInt = int64(int32(c + d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftLL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDconst [c] (SLLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftLL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDconst x [int64(uint32(c)<<uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
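| // The fold above computes the shifted constant as a uint32, so bits moved |
| // past bit 31 are discarded before the result is widened, matching what a |
| // 32-bit logical shift left of the constant produces. |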
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftLLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDconst [c] (SLL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftLLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftRA (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDconst [c] (SRAconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRAconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftRA x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDconst x [int64(int32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftRAreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDconst [c] (SRA <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRA, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftRAreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftRL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ADDconst [c] (SRLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftRL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ADDconst x [int64(uint32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMADDconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMADDshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ADDshiftRLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ADDconst [c] (SRL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMADDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ADDshiftRLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ADDshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMADDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMAND(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (AND (MOVWconst [c]) x) |
| // cond: |
| // result: (ANDconst [c] x) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (AND x (MOVWconst [c])) |
| // cond: |
| // result: (ANDconst [c] x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (AND x (SLLconst [c] y)) |
| // cond: |
| // result: (ANDshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMANDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (SLLconst [c] y) x) |
| // cond: |
| // result: (ANDshiftLL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND x (SRLconst [c] y)) |
| // cond: |
| // result: (ANDshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMANDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (SRLconst [c] y) x) |
| // cond: |
| // result: (ANDshiftRL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND x (SRAconst [c] y)) |
| // cond: |
| // result: (ANDshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMANDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (SRAconst [c] y) x) |
| // cond: |
| // result: (ANDshiftRA x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND x (SLL y z)) |
| // cond: |
| // result: (ANDshiftLLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMANDshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND (SLL y z) x) |
| // cond: |
| // result: (ANDshiftLLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND x (SRL y z)) |
| // cond: |
| // result: (ANDshiftRLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMANDshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND (SRL y z) x) |
| // cond: |
| // result: (ANDshiftRLreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND x (SRA y z)) |
| // cond: |
| // result: (ANDshiftRAreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMANDshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND (SRA y z) x) |
| // cond: |
| // result: (ANDshiftRAreg x y z) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRA { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMANDshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (AND x x) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (AND x (MVN y)) |
| // cond: |
| // result: (BIC x y) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMVN { |
| break |
| } |
| y := v_1.Args[0] |
| v.reset(OpARMBIC) |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (MVN y) x) |
| // cond: |
| // result: (BIC x y) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMVN { |
| break |
| } |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMBIC) |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
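| // ANDing with a complemented operand is folded into BIC (x &^ y) above; the |
| // MVNshift* operands below fold the same way into the matching BICshift* ops. |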
| // match: (AND x (MVNshiftLL y [c])) |
| // cond: |
| // result: (BICshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMVNshiftLL { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (MVNshiftLL y [c]) x) |
| // cond: |
| // result: (BICshiftLL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMVNshiftLL { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMBICshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND x (MVNshiftRL y [c])) |
| // cond: |
| // result: (BICshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMVNshiftRL { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (MVNshiftRL y [c]) x) |
| // cond: |
| // result: (BICshiftRL x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMVNshiftRL { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMBICshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND x (MVNshiftRA y [c])) |
| // cond: |
| // result: (BICshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMVNshiftRA { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (AND (MVNshiftRA y [c]) x) |
| // cond: |
| // result: (BICshiftRA x y [c]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMVNshiftRA { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMBICshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDconst [0] _) |
| // cond: |
| // result: (MOVWconst [0]) |
| for { |
| if v.AuxInt != 0 { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (ANDconst [c] x) |
| // cond: int32(c)==-1 |
| // result: x |
| for { |
| c := v.AuxInt |
| x := v.Args[0] |
| if !(int32(c) == -1) { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (ANDconst [c] (MOVWconst [d])) |
| // cond: |
| // result: (MOVWconst [c&d]) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| d := v_0.AuxInt |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c & d |
| return true |
| } |
| // match: (ANDconst [c] (ANDconst [d] x)) |
| // cond: |
| // result: (ANDconst [c&d] x) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMANDconst { |
| break |
| } |
| d := v_0.AuxInt |
| x := v_0.Args[0] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c & d |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftLL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ANDconst [c] (SLLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftLL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ANDconst x [int64(uint32(c)<<uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMANDconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (ANDshiftLL y:(SLLconst x [c]) x [d]) |
| // cond: c==d |
| // result: y |
| for { |
| d := v.AuxInt |
| y := v.Args[0] |
| if y.Op != OpARMSLLconst { |
| break |
| } |
| c := y.AuxInt |
| x := y.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = y.Type |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftLLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ANDconst [c] (SLL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSLL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftLLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ANDshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMANDshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftRA (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ANDconst [c] (SRAconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRAconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftRA x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ANDconst x [int64(int32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMANDconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (ANDshiftRA y:(SRAconst x [c]) x [d]) |
| // cond: c==d |
| // result: y |
| for { |
| d := v.AuxInt |
| y := v.Args[0] |
| if y.Op != OpARMSRAconst { |
| break |
| } |
| c := y.AuxInt |
| x := y.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = y.Type |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftRAreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ANDconst [c] (SRA <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRA, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftRAreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ANDshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMANDshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftRL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (ANDconst [c] (SRLconst <x.Type> x [d])) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRLconst, x.Type) |
| v0.AuxInt = d |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftRL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (ANDconst x [int64(uint32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMANDconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (ANDshiftRL y:(SRLconst x [c]) x [d]) |
| // cond: c==d |
| // result: y |
| for { |
| d := v.AuxInt |
| y := v.Args[0] |
| if y.Op != OpARMSRLconst { |
| break |
| } |
| c := y.AuxInt |
| x := y.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = y.Type |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMANDshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (ANDshiftRLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (ANDconst [c] (SRL <x.Type> x y)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMANDconst) |
| v.AuxInt = c |
| v0 := b.NewValue0(v.Line, OpARMSRL, x.Type) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (ANDshiftRLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (ANDshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMANDshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBIC(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BIC x (MOVWconst [c])) |
| // cond: |
| // result: (BICconst [c] x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMBICconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (BIC x (SLLconst [c] y)) |
| // cond: |
| // result: (BICshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (BIC x (SRLconst [c] y)) |
| // cond: |
| // result: (BICshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (BIC x (SRAconst [c] y)) |
| // cond: |
| // result: (BICshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMBICshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (BIC x (SLL y z)) |
| // cond: |
| // result: (BICshiftLLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMBICshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (BIC x (SRL y z)) |
| // cond: |
| // result: (BICshiftRLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMBICshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (BIC x (SRA y z)) |
| // cond: |
| // result: (BICshiftRAreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMBICshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (BIC x x) |
| // cond: |
| // result: (MOVWconst [0]) |
| for { |
| x := v.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICconst [0] x) |
| // cond: |
| // result: x |
| for { |
| if v.AuxInt != 0 { |
| break |
| } |
| x := v.Args[0] |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (BICconst [c] _) |
| // cond: int32(c)==-1 |
| // result: (MOVWconst [0]) |
| for { |
| c := v.AuxInt |
| if !(int32(c) == -1) { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (BICconst [c] (MOVWconst [d])) |
| // cond: |
| // result: (MOVWconst [d&^c]) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| d := v_0.AuxInt |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = d &^ c |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftLL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (BICconst x [int64(uint32(c)<<uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMBICconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (BICshiftLL (SLLconst x [c]) x [d]) |
| // cond: c==d |
| // result: (MOVWconst [0]) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v_0.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftLLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (BICshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMBICshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftRA x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (BICconst x [int64(int32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMBICconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (BICshiftRA (SRAconst x [c]) x [d]) |
| // cond: c==d |
| // result: (MOVWconst [0]) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v_0.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftRAreg x y (MOVWconst [c])) |
| // cond: |
| // result: (BICshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMBICshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftRL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (BICconst x [int64(uint32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMBICconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| // match: (BICshiftRL (SRLconst x [c]) x [d]) |
| // cond: c==d |
| // result: (MOVWconst [0]) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v_0.Args[0] |
| if x != v.Args[1] { |
| break |
| } |
| if !(c == d) { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMBICshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (BICshiftRLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (BICshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMBICshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMOVWHSconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
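| // The Flag* values below record the signed and unsigned outcome of the |
| // original comparison (Flag<signed>_<unsigned>). CMOVWHSconst keeps x when |
| // the unsigned result is below (ULT) and takes the constant when it is |
| // equal or above (EQ, UGT); InvertFlags swaps it to the LS form. |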
| // match: (CMOVWHSconst _ (FlagEQ) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagEQ { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWHSconst x (FlagLT_ULT)) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagLT_ULT { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (CMOVWHSconst _ (FlagLT_UGT) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagLT_UGT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWHSconst x (FlagGT_ULT)) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagGT_ULT { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (CMOVWHSconst _ (FlagGT_UGT) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagGT_UGT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWHSconst x (InvertFlags flags) [c]) |
| // cond: |
| // result: (CMOVWLSconst x flags [c]) |
| for { |
| c := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMInvertFlags { |
| break |
| } |
| flags := v_1.Args[0] |
| v.reset(OpARMCMOVWLSconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMOVWLSconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMOVWLSconst _ (FlagEQ) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagEQ { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWLSconst _ (FlagLT_ULT) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagLT_ULT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWLSconst x (FlagLT_UGT)) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagLT_UGT { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (CMOVWLSconst _ (FlagGT_ULT) [c]) |
| // cond: |
| // result: (MOVWconst [c]) |
| for { |
| c := v.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagGT_ULT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = c |
| return true |
| } |
| // match: (CMOVWLSconst x (FlagGT_UGT)) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMFlagGT_UGT { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (CMOVWLSconst x (InvertFlags flags) [c]) |
| // cond: |
| // result: (CMOVWHSconst x flags [c]) |
| for { |
| c := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMInvertFlags { |
| break |
| } |
| flags := v_1.Args[0] |
| v.reset(OpARMCMOVWHSconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(flags) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMP(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMP x (MOVWconst [c])) |
| // cond: |
| // result: (CMPconst [c] x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMCMPconst) |
| v.AuxInt = c |
| v.AddArg(x) |
| return true |
| } |
| // match: (CMP (MOVWconst [c]) x) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] x)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v0.AddArg(x) |
| v.AddArg(v0) |
| return true |
| } |
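| // CMPconst subtracts its AuxInt from the operand, so when the constant is |
| // the left-hand argument of CMP the rule above swaps the operands and wraps |
| // the result in InvertFlags; the swapped shift forms below do the same. |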
| // match: (CMP x (SLLconst [c] y)) |
| // cond: |
| // result: (CMPshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMCMPshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (CMP (SLLconst [c] y) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftLL x y [c])) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftLL, TypeFlags) |
| v0.AuxInt = c |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMP x (SRLconst [c] y)) |
| // cond: |
| // result: (CMPshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRLconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMCMPshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (CMP (SRLconst [c] y) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftRL x y [c])) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftRL, TypeFlags) |
| v0.AuxInt = c |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMP x (SRAconst [c] y)) |
| // cond: |
| // result: (CMPshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRAconst { |
| break |
| } |
| c := v_1.AuxInt |
| y := v_1.Args[0] |
| v.reset(OpARMCMPshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| // match: (CMP (SRAconst [c] y) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftRA x y [c])) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRAconst { |
| break |
| } |
| c := v_0.AuxInt |
| y := v_0.Args[0] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftRA, TypeFlags) |
| v0.AuxInt = c |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMP x (SLL y z)) |
| // cond: |
| // result: (CMPshiftLLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSLL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMCMPshiftLLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (CMP (SLL y z) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftLLreg x y z)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSLL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftLLreg, TypeFlags) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v0.AddArg(z) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMP x (SRL y z)) |
| // cond: |
| // result: (CMPshiftRLreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRL { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMCMPshiftRLreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (CMP (SRL y z) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftRLreg x y z)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRL { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftRLreg, TypeFlags) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v0.AddArg(z) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMP x (SRA y z)) |
| // cond: |
| // result: (CMPshiftRAreg x y z) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMSRA { |
| break |
| } |
| y := v_1.Args[0] |
| z := v_1.Args[1] |
| v.reset(OpARMCMPshiftRAreg) |
| v.AddArg(x) |
| v.AddArg(y) |
| v.AddArg(z) |
| return true |
| } |
| // match: (CMP (SRA y z) x) |
| // cond: |
| // result: (InvertFlags (CMPshiftRAreg x y z)) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRA { |
| break |
| } |
| y := v_0.Args[0] |
| z := v_0.Args[1] |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPshiftRAreg, TypeFlags) |
| v0.AddArg(x) |
| v0.AddArg(y) |
| v0.AddArg(z) |
| v.AddArg(v0) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPD(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPD x (MOVDconst [0])) |
| // cond: |
| // result: (CMPD0 x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVDconst { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpARMCMPD0) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPF(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPF x (MOVFconst [0])) |
| // cond: |
| // result: (CMPF0 x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVFconst { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpARMCMPF0) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPconst(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPconst (MOVWconst [x]) [y]) |
| // cond: int32(x)==int32(y) |
| // result: (FlagEQ) |
| for { |
| y := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| x := v_0.AuxInt |
| if !(int32(x) == int32(y)) { |
| break |
| } |
| v.reset(OpARMFlagEQ) |
| return true |
| } |
| // match: (CMPconst (MOVWconst [x]) [y]) |
| // cond: int32(x)<int32(y) && uint32(x)<uint32(y) |
| // result: (FlagLT_ULT) |
| for { |
| y := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| x := v_0.AuxInt |
| if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { |
| break |
| } |
| v.reset(OpARMFlagLT_ULT) |
| return true |
| } |
| // match: (CMPconst (MOVWconst [x]) [y]) |
| // cond: int32(x)<int32(y) && uint32(x)>uint32(y) |
| // result: (FlagLT_UGT) |
| for { |
| y := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| x := v_0.AuxInt |
| if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { |
| break |
| } |
| v.reset(OpARMFlagLT_UGT) |
| return true |
| } |
| // match: (CMPconst (MOVWconst [x]) [y]) |
| // cond: int32(x)>int32(y) && uint32(x)<uint32(y) |
| // result: (FlagGT_ULT) |
| for { |
| y := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| x := v_0.AuxInt |
| if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { |
| break |
| } |
| v.reset(OpARMFlagGT_ULT) |
| return true |
| } |
| // match: (CMPconst (MOVWconst [x]) [y]) |
| // cond: int32(x)>int32(y) && uint32(x)>uint32(y) |
| // result: (FlagGT_UGT) |
| for { |
| y := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| x := v_0.AuxInt |
| if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { |
| break |
| } |
| v.reset(OpARMFlagGT_UGT) |
| return true |
| } |
| // match: (CMPconst (MOVBUreg _) [c]) |
| // cond: 0xff < c |
| // result: (FlagLT_ULT) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVBUreg { |
| break |
| } |
| if !(0xff < c) { |
| break |
| } |
| v.reset(OpARMFlagLT_ULT) |
| return true |
| } |
| // match: (CMPconst (MOVHUreg _) [c]) |
| // cond: 0xffff < c |
| // result: (FlagLT_ULT) |
| for { |
| c := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVHUreg { |
| break |
| } |
| if !(0xffff < c) { |
| break |
| } |
| v.reset(OpARMFlagLT_ULT) |
| return true |
| } |
| // match: (CMPconst (ANDconst _ [m]) [n]) |
| // cond: 0 <= int32(m) && int32(m) < int32(n) |
| // result: (FlagLT_ULT) |
| for { |
| n := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMANDconst { |
| break |
| } |
| m := v_0.AuxInt |
| if !(0 <= int32(m) && int32(m) < int32(n)) { |
| break |
| } |
| v.reset(OpARMFlagLT_ULT) |
| return true |
| } |
| // match: (CMPconst (SRLconst _ [c]) [n]) |
| // cond: 0 <= n && 0 < c && c <= 32 && (1<<uint32(32-c)) <= uint32(n) |
| // result: (FlagLT_ULT) |
| for { |
| n := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMSRLconst { |
| break |
| } |
| c := v_0.AuxInt |
| if !(0 <= n && 0 < c && c <= 32 && (1<<uint32(32-c)) <= uint32(n)) { |
| break |
| } |
| v.reset(OpARMFlagLT_ULT) |
| return true |
| } |
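| // The MOVBUreg, MOVHUreg, ANDconst and SRLconst rules above rely on known |
| // upper bounds of the compared value: a zero-extended byte or halfword is at |
| // most 0xff or 0xffff, x&m is at most m when int32(m) is non-negative, and a |
| // value shifted right logically by c is below 1<<(32-c); comparing such a |
| // value against a larger constant therefore always yields FlagLT_ULT. |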
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftLL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftLL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSLLconst, x.Type) |
| v1.AuxInt = d |
| v1.AddArg(x) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftLL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (CMPconst x [int64(uint32(c)<<uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMCMPconst) |
| v.AuxInt = int64(uint32(c) << uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftLLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftLLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SLL <x.Type> x y))) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSLL, x.Type) |
| v1.AddArg(x) |
| v1.AddArg(y) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftLLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (CMPshiftLL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMCMPshiftLL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftRA(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftRA (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSRAconst, x.Type) |
| v1.AuxInt = d |
| v1.AddArg(x) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftRA x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (CMPconst x [int64(int32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMCMPconst) |
| v.AuxInt = int64(int32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftRAreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftRAreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SRA <x.Type> x y))) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSRA, x.Type) |
| v1.AddArg(x) |
| v1.AddArg(y) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftRAreg x y (MOVWconst [c])) |
| // cond: |
| // result: (CMPshiftRA x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMCMPshiftRA) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftRL(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftRL (MOVWconst [c]) x [d]) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) |
| for { |
| d := v.AuxInt |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSRLconst, x.Type) |
| v1.AuxInt = d |
| v1.AddArg(x) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftRL x (MOVWconst [c]) [d]) |
| // cond: |
| // result: (CMPconst x [int64(uint32(c)>>uint64(d))]) |
| for { |
| d := v.AuxInt |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpARMCMPconst) |
| v.AuxInt = int64(uint32(c) >> uint64(d)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMCMPshiftRLreg(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (CMPshiftRLreg (MOVWconst [c]) x y) |
| // cond: |
| // result: (InvertFlags (CMPconst [c] (SRL <x.Type> x y))) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_0.AuxInt |
| x := v.Args[1] |
| y := v.Args[2] |
| v.reset(OpARMInvertFlags) |
| v0 := b.NewValue0(v.Line, OpARMCMPconst, TypeFlags) |
| v0.AuxInt = c |
| v1 := b.NewValue0(v.Line, OpARMSRL, x.Type) |
| v1.AddArg(x) |
| v1.AddArg(y) |
| v0.AddArg(v1) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (CMPshiftRLreg x y (MOVWconst [c])) |
| // cond: |
| // result: (CMPshiftRL x y [c]) |
| for { |
| x := v.Args[0] |
| y := v.Args[1] |
| v_2 := v.Args[2] |
| if v_2.Op != OpARMMOVWconst { |
| break |
| } |
| c := v_2.AuxInt |
| v.reset(OpARMCMPshiftRL) |
| v.AuxInt = c |
| v.AddArg(x) |
| v.AddArg(y) |
| return true |
| } |
| return false |
| } |
| func rewriteValueARM_OpARMEqual(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Equal (FlagEQ)) |
| // cond: |
| // result: (MOVWconst [1]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMFlagEQ { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 1 |
| return true |
| } |
| // match: (Equal (FlagLT_ULT)) |
| // cond: |
| // result: (MOVWconst [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMFlagLT_ULT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (Equal (FlagLT_UGT)) |
| // cond: |
| // result: (MOVWconst [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpARMFlagLT_UGT { |
| break |
| } |
| v.reset(OpARMMOVWconst) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (Equal (FlagG
|