| // Code generated from _gen/generic.rules using 'go generate'; DO NOT EDIT. |
| |
| package ssa |
| |
| import "math" |
| import "math/bits" |
| import "cmd/internal/obj" |
| import "cmd/compile/internal/types" |
| import "cmd/compile/internal/ir" |
| |
// rewriteValuegeneric dispatches v to the per-opcode rewrite function for the
// machine-independent (generic) rewrite rules declared in _gen/generic.rules.
// It reports whether v was rewritten in place. Opcodes with no generic rules
// have no case here and always report false.
//
// NOTE(review): this file is generated — do not hand-edit; change
// _gen/generic.rules and re-run 'go generate' instead.
func rewriteValuegeneric(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16(v)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32(v)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F(v)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64(v)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F(v)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8(v)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr(v)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16(v)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32(v)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64(v)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8(v)
	case OpAndB:
		return rewriteValuegeneric_OpAndB(v)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect(v)
	case OpBitLen16:
		return rewriteValuegeneric_OpBitLen16(v)
	case OpBitLen32:
		return rewriteValuegeneric_OpBitLen32(v)
	case OpBitLen64:
		return rewriteValuegeneric_OpBitLen64(v)
	case OpBitLen8:
		return rewriteValuegeneric_OpBitLen8(v)
	case OpCeil:
		return rewriteValuegeneric_OpCeil(v)
	case OpCom16:
		return rewriteValuegeneric_OpCom16(v)
	case OpCom32:
		return rewriteValuegeneric_OpCom32(v)
	case OpCom64:
		return rewriteValuegeneric_OpCom64(v)
	case OpCom8:
		return rewriteValuegeneric_OpCom8(v)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface(v)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice(v)
	case OpConstString:
		return rewriteValuegeneric_OpConstString(v)
	case OpConvert:
		return rewriteValuegeneric_OpConvert(v)
	case OpCtz16:
		return rewriteValuegeneric_OpCtz16(v)
	case OpCtz32:
		return rewriteValuegeneric_OpCtz32(v)
	case OpCtz64:
		return rewriteValuegeneric_OpCtz64(v)
	case OpCtz8:
		return rewriteValuegeneric_OpCtz8(v)
	case OpCvt32Fto32:
		return rewriteValuegeneric_OpCvt32Fto32(v)
	case OpCvt32Fto64:
		return rewriteValuegeneric_OpCvt32Fto64(v)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F(v)
	case OpCvt32to32F:
		return rewriteValuegeneric_OpCvt32to32F(v)
	case OpCvt32to64F:
		return rewriteValuegeneric_OpCvt32to64F(v)
	case OpCvt64Fto32:
		return rewriteValuegeneric_OpCvt64Fto32(v)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F(v)
	case OpCvt64Fto64:
		return rewriteValuegeneric_OpCvt64Fto64(v)
	case OpCvt64to32F:
		return rewriteValuegeneric_OpCvt64to32F(v)
	case OpCvt64to64F:
		return rewriteValuegeneric_OpCvt64to64F(v)
	case OpCvtBoolToUint8:
		return rewriteValuegeneric_OpCvtBoolToUint8(v)
	case OpDiv16:
		return rewriteValuegeneric_OpDiv16(v)
	case OpDiv16u:
		return rewriteValuegeneric_OpDiv16u(v)
	case OpDiv32:
		return rewriteValuegeneric_OpDiv32(v)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F(v)
	case OpDiv32u:
		return rewriteValuegeneric_OpDiv32u(v)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64(v)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F(v)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u(v)
	case OpDiv8:
		return rewriteValuegeneric_OpDiv8(v)
	case OpDiv8u:
		return rewriteValuegeneric_OpDiv8u(v)
	case OpEq16:
		return rewriteValuegeneric_OpEq16(v)
	case OpEq32:
		return rewriteValuegeneric_OpEq32(v)
	case OpEq32F:
		return rewriteValuegeneric_OpEq32F(v)
	case OpEq64:
		return rewriteValuegeneric_OpEq64(v)
	case OpEq64F:
		return rewriteValuegeneric_OpEq64F(v)
	case OpEq8:
		return rewriteValuegeneric_OpEq8(v)
	case OpEqB:
		return rewriteValuegeneric_OpEqB(v)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter(v)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr(v)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice(v)
	case OpFloor:
		return rewriteValuegeneric_OpFloor(v)
	case OpIMake:
		return rewriteValuegeneric_OpIMake(v)
	case OpInterLECall:
		return rewriteValuegeneric_OpInterLECall(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValuegeneric_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16(v)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U(v)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32(v)
	case OpLeq32F:
		return rewriteValuegeneric_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U(v)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64(v)
	case OpLeq64F:
		return rewriteValuegeneric_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U(v)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8(v)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U(v)
	case OpLess16:
		return rewriteValuegeneric_OpLess16(v)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U(v)
	case OpLess32:
		return rewriteValuegeneric_OpLess32(v)
	case OpLess32F:
		return rewriteValuegeneric_OpLess32F(v)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U(v)
	case OpLess64:
		return rewriteValuegeneric_OpLess64(v)
	case OpLess64F:
		return rewriteValuegeneric_OpLess64F(v)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U(v)
	case OpLess8:
		return rewriteValuegeneric_OpLess8(v)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U(v)
	case OpLoad:
		return rewriteValuegeneric_OpLoad(v)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8(v)
	case OpMod16:
		return rewriteValuegeneric_OpMod16(v)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u(v)
	case OpMod32:
		return rewriteValuegeneric_OpMod32(v)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u(v)
	case OpMod64:
		return rewriteValuegeneric_OpMod64(v)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u(v)
	case OpMod8:
		return rewriteValuegeneric_OpMod8(v)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u(v)
	case OpMove:
		return rewriteValuegeneric_OpMove(v)
	case OpMul16:
		return rewriteValuegeneric_OpMul16(v)
	case OpMul32:
		return rewriteValuegeneric_OpMul32(v)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F(v)
	case OpMul64:
		return rewriteValuegeneric_OpMul64(v)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F(v)
	case OpMul8:
		return rewriteValuegeneric_OpMul8(v)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16(v)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32(v)
	case OpNeg32F:
		return rewriteValuegeneric_OpNeg32F(v)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64(v)
	case OpNeg64F:
		return rewriteValuegeneric_OpNeg64F(v)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8(v)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16(v)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32(v)
	case OpNeq32F:
		return rewriteValuegeneric_OpNeq32F(v)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64(v)
	case OpNeq64F:
		return rewriteValuegeneric_OpNeq64F(v)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8(v)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB(v)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter(v)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr(v)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice(v)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck(v)
	case OpNot:
		return rewriteValuegeneric_OpNot(v)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr(v)
	case OpOr16:
		return rewriteValuegeneric_OpOr16(v)
	case OpOr32:
		return rewriteValuegeneric_OpOr32(v)
	case OpOr64:
		return rewriteValuegeneric_OpOr64(v)
	case OpOr8:
		return rewriteValuegeneric_OpOr8(v)
	case OpOrB:
		return rewriteValuegeneric_OpOrB(v)
	case OpPhi:
		return rewriteValuegeneric_OpPhi(v)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex(v)
	case OpRotateLeft16:
		return rewriteValuegeneric_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValuegeneric_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValuegeneric_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValuegeneric_OpRotateLeft8(v)
	case OpRound32F:
		return rewriteValuegeneric_OpRound32F(v)
	case OpRound64F:
		return rewriteValuegeneric_OpRound64F(v)
	case OpRoundToEven:
		return rewriteValuegeneric_OpRoundToEven(v)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValuegeneric_OpSelect0(v)
	case OpSelect1:
		return rewriteValuegeneric_OpSelect1(v)
	case OpSelectN:
		return rewriteValuegeneric_OpSelectN(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32(v)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64(v)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64(v)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16(v)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32(v)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64(v)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap(v)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen(v)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr(v)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask(v)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt(v)
	case OpStaticCall:
		return rewriteValuegeneric_OpStaticCall(v)
	case OpStaticLECall:
		return rewriteValuegeneric_OpStaticLECall(v)
	case OpStore:
		return rewriteValuegeneric_OpStore(v)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen(v)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr(v)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect(v)
	case OpSub16:
		return rewriteValuegeneric_OpSub16(v)
	case OpSub32:
		return rewriteValuegeneric_OpSub32(v)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F(v)
	case OpSub64:
		return rewriteValuegeneric_OpSub64(v)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F(v)
	case OpSub8:
		return rewriteValuegeneric_OpSub8(v)
	case OpTrunc:
		return rewriteValuegeneric_OpTrunc(v)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8(v)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16(v)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8(v)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16(v)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32(v)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8(v)
	case OpXor16:
		return rewriteValuegeneric_OpXor16(v)
	case OpXor32:
		return rewriteValuegeneric_OpXor32(v)
	case OpXor64:
		return rewriteValuegeneric_OpXor64(v)
	case OpXor8:
		return rewriteValuegeneric_OpXor8(v)
	case OpZero:
		return rewriteValuegeneric_OpZero(v)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32(v)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64(v)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64(v)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16(v)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32(v)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64(v)
	}
	// No generic rule exists for this opcode; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpAdd16 tries each generic rewrite rule for Add16, in
// the order the rules appear in _gen/generic.rules, and reports whether v was
// rewritten in place. Each rule is a "for { ... break }" section: the section
// either rewrites v and returns true, or falls through to the next rule.
// Add16 is commutative, so the inner "_iN" loops try each two-operand match
// in both argument orders.
func rewriteValuegeneric_OpAdd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c+d])
	for {
		// Try (v_0, v_1) and then the swapped order (v_1, v_0).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			// Constant-fold; int16 addition wraps on overflow, matching Add16 semantics.
			v.AuxInt = int16ToAuxInt(c + d)
			return true
		}
		break // rule did not fire in either operand order
	}
	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul16 {
				continue
			}
			// Index-1 access asserts v_0 has two args before the loop below swaps them.
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// Mul16 is commutative too: try both orders of its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul16 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul16)
					v0 := b.NewValue0(v.Pos, OpAdd16, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add16 (Const16 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			// Additive identity: replace v with x.
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add16 x (Neg16 y))
	// result: (Sub16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg16 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 (Com16 x) x)
	// result: (Const16 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			// ^x + x == -1 in two's complement.
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add16 (Sub16 x t) (Add16 t y))
	// result: (Add16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub16 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				// (x - t) + (t + y): the t terms cancel.
				v.reset(OpAdd16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add16 (Const16 [1]) (Com16 x))
	// result: (Neg16 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
				continue
			}
			x := v_1.Args[0]
			// ^x + 1 == -x in two's complement.
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add16 x (Sub16 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub16 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add16 x (Add16 y (Sub16 z x)))
	// result: (Add16 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub16 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd16)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// Reassociate only when z and x are non-constant, so constants
				// bubble outward (and this rule cannot re-fire forever).
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpAdd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub16 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst16 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst16 && x.Op != OpConst16) {
				continue
			}
			v.reset(OpAdd16)
			v0 := b.NewValue0(v.Pos, OpSub16, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// result: (Add16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				// Both constants must have the same type t for the fold to be valid.
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// result: (Sub16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpSub16 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt16(v_1_0.AuxInt)
			v.reset(OpSub16)
			v0 := b.NewValue0(v.Pos, OpConst16, t)
			v0.AuxInt = int16ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// match: (Add16 (Lsh16x64 x z:(Const64 <t> [c])) (Rsh16Ux64 x (Const64 [d])))
	// cond: c < 16 && d == 16-c && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh16x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			// Both shifts must apply to the same value x.
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			// Complementary constant shift amounts (c + d == 16) form a rotate,
			// but only on targets where canRotate says a 16-bit rotate is worthwhile.
			if !(c < 16 && d == 16-c && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x64 x y) right:(Rsh16Ux64 x (Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			// Variable shift amounts y and 16-y: a rotate, provided at least one
			// shift is known to be in bounds and the target supports 16-bit rotates.
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x32 x y) right:(Rsh16Ux32 x (Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x16 x y) right:(Rsh16Ux16 x (Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 left:(Lsh16x8 x y) right:(Rsh16Ux8 x (Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh16x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh16Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 16 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux64 x y) left:(Lsh16x64 x z:(Sub64 (Const64 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			// Mirror of the rule above: the subtraction 16-y is on the left-shift
			// side here, so the rotate amount is z = 16-y rather than y.
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux32 x y) left:(Lsh16x32 x z:(Sub32 (Const32 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux16 x y) left:(Lsh16x16 x z:(Sub16 (Const16 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add16 right:(Rsh16Ux8 x y) left:(Lsh16x8 x z:(Sub8 (Const8 [16]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)
	// result: (RotateLeft16 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh16Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh16x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 16 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 16)) {
				continue
			}
			v.reset(OpRotateLeft16)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// No Add16 rule matched.
	return false
}
// rewriteValuegeneric_OpAdd32 applies the generic rewrite rules for Add32:
// constant folding, algebraic simplification (distributing over Mul32,
// cancelling Sub32/Neg32/Com32 patterns, re-associating constants), and
// recognition of shift pairs that form a 32-bit rotate. Rules are tried in
// order; the first one that matches rewrites v in place. It reports whether
// v was changed.
func rewriteValuegeneric_OpAdd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed by canRotate below to ask whether the target
	// supports 32-bit rotate instructions.
	config := b.Func.Config
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c+d])
	for {
		// Add32 is commutative: the _i0 loop swaps v_0 and v_1 on the
		// second pass so the pattern is tried with both argument orders.
		// (The same idiom is used by every commutative match below.)
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul32 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul32)
					v0 := b.NewValue0(v.Pos, OpAdd32, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add32 (Const32 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add32 x (Neg32 y))
	// result: (Sub32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg32 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 (Com32 x) x)
	// result: (Const32 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add32 (Sub32 x t) (Add32 t y))
	// result: (Add32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub32 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add32 (Const32 [1]) (Com32 x))
	// result: (Neg32 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add32 x (Sub32 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub32 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add32 x (Add32 y (Sub32 z x)))
	// result: (Add32 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub32 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd32)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The cond keeps the constant floating to the top of the
				// expression so the constant-folding rules above can see it.
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpAdd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub32 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst32 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst32 && x.Op != OpConst32) {
				continue
			}
			v.reset(OpAdd32)
			v0 := b.NewValue0(v.Pos, OpSub32, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// result: (Add32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
	// result: (Sub32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpSub32 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt32(v_1_0.AuxInt)
			v.reset(OpSub32)
			v0 := b.NewValue0(v.Pos, OpConst32, t)
			v0.AuxInt = int32ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// The remaining rules recognize (x<<k | x>>(32-k)) shift pairs,
	// in either operand order and for every shift-count width, and
	// replace them with RotateLeft32 when the target supports it.
	// match: (Add32 (Lsh32x64 x z:(Const64 <t> [c])) (Rsh32Ux64 x (Const64 [d])))
	// cond: c < 32 && d == 32-c && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh32x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 32 && d == 32-c && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x64 x y) right:(Rsh32Ux64 x (Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x32 x y) right:(Rsh32Ux32 x (Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x16 x y) right:(Rsh32Ux16 x (Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 left:(Lsh32x8 x y) right:(Rsh32Ux8 x (Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh32x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh32Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 32 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux64 x y) left:(Lsh32x64 x z:(Sub64 (Const64 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux32 x y) left:(Lsh32x32 x z:(Sub32 (Const32 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux16 x y) left:(Lsh32x16 x z:(Sub16 (Const16 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add32 right:(Rsh32Ux8 x y) left:(Lsh32x8 x z:(Sub8 (Const8 [32]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)
	// result: (RotateLeft32 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh32Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh32x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 32 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 32)) {
				continue
			}
			v.reset(OpRotateLeft32)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
| func rewriteValuegeneric_OpAdd32F(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| // match: (Add32F (Const32F [c]) (Const32F [d])) |
| // cond: c+d == c+d |
| // result: (Const32F [c+d]) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst32F { |
| continue |
| } |
| c := auxIntToFloat32(v_0.AuxInt) |
| if v_1.Op != OpConst32F { |
| continue |
| } |
| d := auxIntToFloat32(v_1.AuxInt) |
| if !(c+d == c+d) { |
| continue |
| } |
| v.reset(OpConst32F) |
| v.AuxInt = float32ToAuxInt(c + d) |
| return true |
| } |
| break |
| } |
| return false |
| } |
// rewriteValuegeneric_OpAdd64 applies the generic rewrite rules for Add64:
// constant folding, algebraic simplification (distributing over Mul64,
// cancelling Sub64/Neg64/Com64 patterns, re-associating constants), and
// recognition of shift pairs that form a 64-bit rotate. Rules are tried in
// order; the first one that matches rewrites v in place. It reports whether
// v was changed.
func rewriteValuegeneric_OpAdd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// config is needed by canRotate below to ask whether the target
	// supports 64-bit rotate instructions.
	config := b.Func.Config
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c+d])
	for {
		// Add64 is commutative: the _i0 loop swaps v_0 and v_1 on the
		// second pass so the pattern is tried with both argument orders.
		// (The same idiom is used by every commutative match below.)
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul64 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul64)
					v0 := b.NewValue0(v.Pos, OpAdd64, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add64 (Const64 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add64 x (Neg64 y))
	// result: (Sub64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpNeg64 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpSub64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 (Com64 x) x)
	// result: (Const64 [-1])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(-1)
			return true
		}
		break
	}
	// match: (Add64 (Sub64 x t) (Add64 t y))
	// result: (Add64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub64 {
				continue
			}
			t := v_0.Args[1]
			x := v_0.Args[0]
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if t != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAdd64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (Add64 (Const64 [1]) (Com64 x))
	// result: (Neg64 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
				continue
			}
			x := v_1.Args[0]
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add64 x (Sub64 y x))
	// result: y
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpSub64 {
				continue
			}
			_ = v_1.Args[1]
			y := v_1.Args[0]
			if x != v_1.Args[1] {
				continue
			}
			v.copyOf(y)
			return true
		}
		break
	}
	// match: (Add64 x (Add64 y (Sub64 z x)))
	// result: (Add64 y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				y := v_1_0
				if v_1_1.Op != OpSub64 {
					continue
				}
				_ = v_1_1.Args[1]
				z := v_1_1.Args[0]
				if x != v_1_1.Args[1] {
					continue
				}
				v.reset(OpAdd64)
				v.AddArg2(y, z)
				return true
			}
		}
		break
	}
	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The cond keeps the constant floating to the top of the
				// expression so the constant-folding rules above can see it.
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpAdd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub64 {
				continue
			}
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst64 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst64 && x.Op != OpConst64) {
				continue
			}
			v.reset(OpAdd64)
			v0 := b.NewValue0(v.Pos, OpSub64, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
	// result: (Sub64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpSub64 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt64(v_1_0.AuxInt)
			v.reset(OpSub64)
			v0 := b.NewValue0(v.Pos, OpConst64, t)
			v0.AuxInt = int64ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// The remaining rules recognize (x<<k | x>>(64-k)) shift pairs,
	// in either operand order and for every shift-count width, and
	// replace them with RotateLeft64 when the target supports it.
	// match: (Add64 (Lsh64x64 x z:(Const64 <t> [c])) (Rsh64Ux64 x (Const64 [d])))
	// cond: c < 64 && d == 64-c && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpLsh64x64 {
				continue
			}
			_ = v_0.Args[1]
			x := v_0.Args[0]
			z := v_0.Args[1]
			if z.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(z.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			_ = v_1.Args[1]
			if x != v_1.Args[0] {
				continue
			}
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1_1.AuxInt)
			if !(c < 64 && d == 64-c && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x64 x y) right:(Rsh64Ux64 x (Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x64 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux64 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub64 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x32 x y) right:(Rsh64Ux32 x (Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x32 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux32 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub32 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x16 x y) right:(Rsh64Ux16 x (Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x16 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux16 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub16 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 left:(Lsh64x8 x y) right:(Rsh64Ux8 x (Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			left := v_0
			if left.Op != OpLsh64x8 {
				continue
			}
			y := left.Args[1]
			x := left.Args[0]
			right := v_1
			if right.Op != OpRsh64Ux8 {
				continue
			}
			_ = right.Args[1]
			if x != right.Args[0] {
				continue
			}
			right_1 := right.Args[1]
			if right_1.Op != OpSub8 {
				continue
			}
			_ = right_1.Args[1]
			right_1_0 := right_1.Args[0]
			if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 64 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, y)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux64 x y) left:(Lsh64x64 x z:(Sub64 (Const64 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux64 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x64 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub64 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux32 x y) left:(Lsh64x32 x z:(Sub32 (Const32 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux32 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x32 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub32 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux16 x y) left:(Lsh64x16 x z:(Sub16 (Const16 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux16 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x16 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub16 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	// match: (Add64 right:(Rsh64Ux8 x y) left:(Lsh64x8 x z:(Sub8 (Const8 [64]) y)))
	// cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)
	// result: (RotateLeft64 x z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			right := v_0
			if right.Op != OpRsh64Ux8 {
				continue
			}
			y := right.Args[1]
			x := right.Args[0]
			left := v_1
			if left.Op != OpLsh64x8 {
				continue
			}
			_ = left.Args[1]
			if x != left.Args[0] {
				continue
			}
			z := left.Args[1]
			if z.Op != OpSub8 {
				continue
			}
			_ = z.Args[1]
			z_0 := z.Args[0]
			if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 64 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 64)) {
				continue
			}
			v.reset(OpRotateLeft64)
			v.AddArg2(x, z)
			return true
		}
		break
	}
	return false
}
| func rewriteValuegeneric_OpAdd64F(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| // match: (Add64F (Const64F [c]) (Const64F [d])) |
| // cond: c+d == c+d |
| // result: (Const64F [c+d]) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst64F { |
| continue |
| } |
| c := auxIntToFloat64(v_0.AuxInt) |
| if v_1.Op != OpConst64F { |
| continue |
| } |
| d := auxIntToFloat64(v_1.AuxInt) |
| if !(c+d == c+d) { |
| continue |
| } |
| v.reset(OpConst64F) |
| v.AuxInt = float64ToAuxInt(c + d) |
| return true |
| } |
| break |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpAdd8(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| b := v.Block |
| config := b.Func.Config |
| // match: (Add8 (Const8 [c]) (Const8 [d])) |
| // result: (Const8 [c+d]) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1.AuxInt) |
| v.reset(OpConst8) |
| v.AuxInt = int8ToAuxInt(c + d) |
| return true |
| } |
| break |
| } |
| // match: (Add8 <t> (Mul8 x y) (Mul8 x z)) |
| // result: (Mul8 x (Add8 <t> y z)) |
| for { |
| t := v.Type |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpMul8 { |
| continue |
| } |
| _ = v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| v_0_1 := v_0.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 { |
| x := v_0_0 |
| y := v_0_1 |
| if v_1.Op != OpMul8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 { |
| if x != v_1_0 { |
| continue |
| } |
| z := v_1_1 |
| v.reset(OpMul8) |
| v0 := b.NewValue0(v.Pos, OpAdd8, t) |
| v0.AddArg2(y, z) |
| v.AddArg2(x, v0) |
| return true |
| } |
| } |
| } |
| break |
| } |
| // match: (Add8 (Const8 [0]) x) |
| // result: x |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { |
| continue |
| } |
| x := v_1 |
| v.copyOf(x) |
| return true |
| } |
| break |
| } |
| // match: (Add8 x (Neg8 y)) |
| // result: (Sub8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| x := v_0 |
| if v_1.Op != OpNeg8 { |
| continue |
| } |
| y := v_1.Args[0] |
| v.reset(OpSub8) |
| v.AddArg2(x, y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Com8 x) x) |
| // result: (Const8 [-1]) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpCom8 { |
| continue |
| } |
| x := v_0.Args[0] |
| if x != v_1 { |
| continue |
| } |
| v.reset(OpConst8) |
| v.AuxInt = int8ToAuxInt(-1) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Sub8 x t) (Add8 t y)) |
| // result: (Add8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpSub8 { |
| continue |
| } |
| t := v_0.Args[1] |
| x := v_0.Args[0] |
| if v_1.Op != OpAdd8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 { |
| if t != v_1_0 { |
| continue |
| } |
| y := v_1_1 |
| v.reset(OpAdd8) |
| v.AddArg2(x, y) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Const8 [1]) (Com8 x)) |
| // result: (Neg8 x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 { |
| continue |
| } |
| x := v_1.Args[0] |
| v.reset(OpNeg8) |
| v.AddArg(x) |
| return true |
| } |
| break |
| } |
| // match: (Add8 x (Sub8 y x)) |
| // result: y |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| x := v_0 |
| if v_1.Op != OpSub8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| y := v_1.Args[0] |
| if x != v_1.Args[1] { |
| continue |
| } |
| v.copyOf(y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 x (Add8 y (Sub8 z x))) |
| // result: (Add8 y z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| x := v_0 |
| if v_1.Op != OpAdd8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 { |
| y := v_1_0 |
| if v_1_1.Op != OpSub8 { |
| continue |
| } |
| _ = v_1_1.Args[1] |
| z := v_1_1.Args[0] |
| if x != v_1_1.Args[1] { |
| continue |
| } |
| v.reset(OpAdd8) |
| v.AddArg2(y, z) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Add8 i:(Const8 <t>) z) x) |
| // cond: (z.Op != OpConst8 && x.Op != OpConst8) |
| // result: (Add8 i (Add8 <t> z x)) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpAdd8 { |
| continue |
| } |
| _ = v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| v_0_1 := v_0.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 { |
| i := v_0_0 |
| if i.Op != OpConst8 { |
| continue |
| } |
| t := i.Type |
| z := v_0_1 |
| x := v_1 |
| if !(z.Op != OpConst8 && x.Op != OpConst8) { |
| continue |
| } |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpAdd8, t) |
| v0.AddArg2(z, x) |
| v.AddArg2(i, v0) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Sub8 i:(Const8 <t>) z) x) |
| // cond: (z.Op != OpConst8 && x.Op != OpConst8) |
| // result: (Add8 i (Sub8 <t> x z)) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpSub8 { |
| continue |
| } |
| z := v_0.Args[1] |
| i := v_0.Args[0] |
| if i.Op != OpConst8 { |
| continue |
| } |
| t := i.Type |
| x := v_1 |
| if !(z.Op != OpConst8 && x.Op != OpConst8) { |
| continue |
| } |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpSub8, t) |
| v0.AddArg2(x, z) |
| v.AddArg2(i, v0) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) |
| // result: (Add8 (Const8 <t> [c+d]) x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| t := v_0.Type |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpAdd8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 { |
| if v_1_0.Op != OpConst8 || v_1_0.Type != t { |
| continue |
| } |
| d := auxIntToInt8(v_1_0.AuxInt) |
| x := v_1_1 |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpConst8, t) |
| v0.AuxInt = int8ToAuxInt(c + d) |
| v.AddArg2(v0, x) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x)) |
| // result: (Sub8 (Const8 <t> [c+d]) x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| t := v_0.Type |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpSub8 { |
| continue |
| } |
| x := v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| if v_1_0.Op != OpConst8 || v_1_0.Type != t { |
| continue |
| } |
| d := auxIntToInt8(v_1_0.AuxInt) |
| v.reset(OpSub8) |
| v0 := b.NewValue0(v.Pos, OpConst8, t) |
| v0.AuxInt = int8ToAuxInt(c + d) |
| v.AddArg2(v0, x) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Lsh8x64 x z:(Const64 <t> [c])) (Rsh8Ux64 x (Const64 [d]))) |
| // cond: c < 8 && d == 8-c && canRotate(config, 8) |
| // result: (RotateLeft8 x z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLsh8x64 { |
| continue |
| } |
| _ = v_0.Args[1] |
| x := v_0.Args[0] |
| z := v_0.Args[1] |
| if z.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(z.AuxInt) |
| if v_1.Op != OpRsh8Ux64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(c < 8 && d == 8-c && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, z) |
| return true |
| } |
| break |
| } |
| // match: (Add8 left:(Lsh8x64 x y) right:(Rsh8Ux64 x (Sub64 (Const64 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| left := v_0 |
| if left.Op != OpLsh8x64 { |
| continue |
| } |
| y := left.Args[1] |
| x := left.Args[0] |
| right := v_1 |
| if right.Op != OpRsh8Ux64 { |
| continue |
| } |
| _ = right.Args[1] |
| if x != right.Args[0] { |
| continue |
| } |
| right_1 := right.Args[1] |
| if right_1.Op != OpSub64 { |
| continue |
| } |
| _ = right_1.Args[1] |
| right_1_0 := right_1.Args[0] |
| if right_1_0.Op != OpConst64 || auxIntToInt64(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 left:(Lsh8x32 x y) right:(Rsh8Ux32 x (Sub32 (Const32 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| left := v_0 |
| if left.Op != OpLsh8x32 { |
| continue |
| } |
| y := left.Args[1] |
| x := left.Args[0] |
| right := v_1 |
| if right.Op != OpRsh8Ux32 { |
| continue |
| } |
| _ = right.Args[1] |
| if x != right.Args[0] { |
| continue |
| } |
| right_1 := right.Args[1] |
| if right_1.Op != OpSub32 { |
| continue |
| } |
| _ = right_1.Args[1] |
| right_1_0 := right_1.Args[0] |
| if right_1_0.Op != OpConst32 || auxIntToInt32(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 left:(Lsh8x16 x y) right:(Rsh8Ux16 x (Sub16 (Const16 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| left := v_0 |
| if left.Op != OpLsh8x16 { |
| continue |
| } |
| y := left.Args[1] |
| x := left.Args[0] |
| right := v_1 |
| if right.Op != OpRsh8Ux16 { |
| continue |
| } |
| _ = right.Args[1] |
| if x != right.Args[0] { |
| continue |
| } |
| right_1 := right.Args[1] |
| if right_1.Op != OpSub16 { |
| continue |
| } |
| _ = right_1.Args[1] |
| right_1_0 := right_1.Args[0] |
| if right_1_0.Op != OpConst16 || auxIntToInt16(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 left:(Lsh8x8 x y) right:(Rsh8Ux8 x (Sub8 (Const8 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x y) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| left := v_0 |
| if left.Op != OpLsh8x8 { |
| continue |
| } |
| y := left.Args[1] |
| x := left.Args[0] |
| right := v_1 |
| if right.Op != OpRsh8Ux8 { |
| continue |
| } |
| _ = right.Args[1] |
| if x != right.Args[0] { |
| continue |
| } |
| right_1 := right.Args[1] |
| if right_1.Op != OpSub8 { |
| continue |
| } |
| _ = right_1.Args[1] |
| right_1_0 := right_1.Args[0] |
| if right_1_0.Op != OpConst8 || auxIntToInt8(right_1_0.AuxInt) != 8 || y != right_1.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, y) |
| return true |
| } |
| break |
| } |
| // match: (Add8 right:(Rsh8Ux64 x y) left:(Lsh8x64 x z:(Sub64 (Const64 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| right := v_0 |
| if right.Op != OpRsh8Ux64 { |
| continue |
| } |
| y := right.Args[1] |
| x := right.Args[0] |
| left := v_1 |
| if left.Op != OpLsh8x64 { |
| continue |
| } |
| _ = left.Args[1] |
| if x != left.Args[0] { |
| continue |
| } |
| z := left.Args[1] |
| if z.Op != OpSub64 { |
| continue |
| } |
| _ = z.Args[1] |
| z_0 := z.Args[0] |
| if z_0.Op != OpConst64 || auxIntToInt64(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, z) |
| return true |
| } |
| break |
| } |
| // match: (Add8 right:(Rsh8Ux32 x y) left:(Lsh8x32 x z:(Sub32 (Const32 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| right := v_0 |
| if right.Op != OpRsh8Ux32 { |
| continue |
| } |
| y := right.Args[1] |
| x := right.Args[0] |
| left := v_1 |
| if left.Op != OpLsh8x32 { |
| continue |
| } |
| _ = left.Args[1] |
| if x != left.Args[0] { |
| continue |
| } |
| z := left.Args[1] |
| if z.Op != OpSub32 { |
| continue |
| } |
| _ = z.Args[1] |
| z_0 := z.Args[0] |
| if z_0.Op != OpConst32 || auxIntToInt32(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, z) |
| return true |
| } |
| break |
| } |
| // match: (Add8 right:(Rsh8Ux16 x y) left:(Lsh8x16 x z:(Sub16 (Const16 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| right := v_0 |
| if right.Op != OpRsh8Ux16 { |
| continue |
| } |
| y := right.Args[1] |
| x := right.Args[0] |
| left := v_1 |
| if left.Op != OpLsh8x16 { |
| continue |
| } |
| _ = left.Args[1] |
| if x != left.Args[0] { |
| continue |
| } |
| z := left.Args[1] |
| if z.Op != OpSub16 { |
| continue |
| } |
| _ = z.Args[1] |
| z_0 := z.Args[0] |
| if z_0.Op != OpConst16 || auxIntToInt16(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, z) |
| return true |
| } |
| break |
| } |
| // match: (Add8 right:(Rsh8Ux8 x y) left:(Lsh8x8 x z:(Sub8 (Const8 [8]) y))) |
| // cond: (shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8) |
| // result: (RotateLeft8 x z) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| right := v_0 |
| if right.Op != OpRsh8Ux8 { |
| continue |
| } |
| y := right.Args[1] |
| x := right.Args[0] |
| left := v_1 |
| if left.Op != OpLsh8x8 { |
| continue |
| } |
| _ = left.Args[1] |
| if x != left.Args[0] { |
| continue |
| } |
| z := left.Args[1] |
| if z.Op != OpSub8 { |
| continue |
| } |
| _ = z.Args[1] |
| z_0 := z.Args[0] |
| if z_0.Op != OpConst8 || auxIntToInt8(z_0.AuxInt) != 8 || y != z.Args[1] || !((shiftIsBounded(left) || shiftIsBounded(right)) && canRotate(config, 8)) { |
| continue |
| } |
| v.reset(OpRotateLeft8) |
| v.AddArg2(x, z) |
| return true |
| } |
| break |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpAddPtr(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| // match: (AddPtr <t> x (Const64 [c])) |
| // result: (OffPtr <t> x [c]) |
| for { |
| t := v.Type |
| x := v_0 |
| if v_1.Op != OpConst64 { |
| break |
| } |
| c := auxIntToInt64(v_1.AuxInt) |
| v.reset(OpOffPtr) |
| v.Type = t |
| v.AuxInt = int64ToAuxInt(c) |
| v.AddArg(x) |
| return true |
| } |
| // match: (AddPtr <t> x (Const32 [c])) |
| // result: (OffPtr <t> x [int64(c)]) |
| for { |
| t := v.Type |
| x := v_0 |
| if v_1.Op != OpConst32 { |
| break |
| } |
| c := auxIntToInt32(v_1.AuxInt) |
| v.reset(OpOffPtr) |
| v.Type = t |
| v.AuxInt = int64ToAuxInt(int64(c)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
// rewriteValuegeneric_OpAnd16 applies the generic rewrite rules for And16:
// constant folding, De Morgan on complemented operands, masking results that
// are provably zero, identities (x&x, &-1, &0, x&^x), absorption, and
// reassociation that floats constants outward so they can combine.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValuegeneric_OpAnd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c&d])
	for {
		// And16 is commutative: the _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second pass.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And16 <t> (Com16 x) (Com16 y))
	// result: (Com16 (Or16 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom16 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom16)
			v0 := b.NewValue0(v.Pos, OpOr16, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
	// cond: c >= int64(16-ntz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-ntz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
	// cond: c >= int64(16-nlz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpLsh16x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-nlz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And16 (Const16 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 (Com16 x) x)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom16 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x (And16 x y))
	// result: (And16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And16 is commutative too: _i1 swaps its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And16 (And16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpAnd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
	// result: (And16 (Const16 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd32 applies the generic rewrite rules for And32:
// constant folding, De Morgan on complemented operands, masking results that
// are provably zero, identities (x&x, &-1, &0, x&^x), absorption, and
// reassociation that floats constants outward so they can combine.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValuegeneric_OpAnd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c&d])
	for {
		// And32 is commutative: the _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second pass.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And32 <t> (Com32 x) (Com32 y))
	// result: (Com32 (Or32 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom32 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom32)
			v0 := b.NewValue0(v.Pos, OpOr32, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
	// cond: c >= int64(32-ntz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-ntz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
	// cond: c >= int64(32-nlz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpLsh32x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-nlz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And32 (Const32 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 (Com32 x) x)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom32 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x (And32 x y))
	// result: (And32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And32 is commutative too: _i1 swaps its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And32 (And32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpAnd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
	// result: (And32 (Const32 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd64 applies the generic rewrite rules for And64:
// constant folding, De Morgan on complemented operands, masking results that
// are provably zero, identities (x&x, &-1, &0, x&^x), absorption, and
// reassociation that floats constants outward so they can combine.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValuegeneric_OpAnd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c&d])
	for {
		// And64 is commutative: the _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second pass.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And64 <t> (Com64 x) (Com64 y))
	// result: (Com64 (Or64 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom64 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom64)
			v0 := b.NewValue0(v.Pos, OpOr64, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
	// cond: c >= int64(64-ntz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(64-ntz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
	// cond: c >= int64(64-nlz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLsh64x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(64-nlz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And64 (Const64 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 (Com64 x) x)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom64 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x (And64 x y))
	// result: (And64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And64 is commutative too: _i1 swaps its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And64 (And64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd8 applies the generic rewrite rules for And8:
// constant folding, De Morgan on complemented operands, masking results that
// are provably zero, identities (x&x, &-1, &0, x&^x), absorption, and
// reassociation that floats constants outward so they can combine.
// Rules are tried in order; the first match rewrites v and returns true.
func rewriteValuegeneric_OpAnd8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c&d])
	for {
		// And8 is commutative: the _i0 loop tries both argument orders
		// by swapping v_0 and v_1 on the second pass.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And8 <t> (Com8 x) (Com8 y))
	// result: (Com8 (Or8 <t> x y))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if v_1.Op != OpCom8 {
				continue
			}
			y := v_1.Args[0]
			v.reset(OpCom8)
			v0 := b.NewValue0(v.Pos, OpOr8, t)
			v0.AddArg2(x, y)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
	// cond: c >= int64(8-ntz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpRsh8Ux64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(8-ntz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
	// cond: c >= int64(8-nlz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpLsh8x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(8-nlz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And8 (Const8 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 (Com8 x) x)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpCom8 {
				continue
			}
			x := v_0.Args[0]
			if x != v_1 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x (And8 x y))
	// result: (And8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			// Inner And8 is commutative too: _i1 swaps its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd8)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// match: (And8 (And8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpAnd8, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
	// result: (And8 (Const8 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
| func rewriteValuegeneric_OpAndB(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| b := v.Block |
| // match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d]))) |
| // cond: d >= c |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d]))) |
| // cond: d >= c |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d]))) |
| // cond: d >= c |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d]))) |
| // cond: d >= c |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d]))) |
| // cond: d >= c |
| // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLess16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d]))) |
| // cond: d >= c |
| // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLeq16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d]))) |
| // cond: d >= c |
| // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLess8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d]))) |
| // cond: d >= c |
| // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLeq8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32 { |
| cont
|