| // Code generated from gen/generic.rules; DO NOT EDIT. |
| // generated with: cd gen; go run *.go |
| |
| package ssa |
| |
| import "math" |
| import "cmd/compile/internal/types" |
| |
// rewriteValuegeneric dispatches v to the machine-independent ("generic")
// rewrite routine for its opcode and reports whether any rule fired and
// mutated v in place. Ops with no generic rules fall through the switch
// and return false.
//
// NOTE(review): this file is generated from gen/generic.rules (see the
// file header); any change belongs in the rule file, not here.
func rewriteValuegeneric(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16(v)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32(v)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F(v)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64(v)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F(v)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8(v)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr(v)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16(v)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32(v)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64(v)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8(v)
	case OpAndB:
		return rewriteValuegeneric_OpAndB(v)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect(v)
	case OpCom16:
		return rewriteValuegeneric_OpCom16(v)
	case OpCom32:
		return rewriteValuegeneric_OpCom32(v)
	case OpCom64:
		return rewriteValuegeneric_OpCom64(v)
	case OpCom8:
		return rewriteValuegeneric_OpCom8(v)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface(v)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice(v)
	case OpConstString:
		return rewriteValuegeneric_OpConstString(v)
	case OpConvert:
		return rewriteValuegeneric_OpConvert(v)
	case OpCtz16:
		return rewriteValuegeneric_OpCtz16(v)
	case OpCtz32:
		return rewriteValuegeneric_OpCtz32(v)
	case OpCtz64:
		return rewriteValuegeneric_OpCtz64(v)
	case OpCtz8:
		return rewriteValuegeneric_OpCtz8(v)
	case OpCvt32Fto32:
		return rewriteValuegeneric_OpCvt32Fto32(v)
	case OpCvt32Fto64:
		return rewriteValuegeneric_OpCvt32Fto64(v)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F(v)
	case OpCvt32to32F:
		return rewriteValuegeneric_OpCvt32to32F(v)
	case OpCvt32to64F:
		return rewriteValuegeneric_OpCvt32to64F(v)
	case OpCvt64Fto32:
		return rewriteValuegeneric_OpCvt64Fto32(v)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F(v)
	case OpCvt64Fto64:
		return rewriteValuegeneric_OpCvt64Fto64(v)
	case OpCvt64to32F:
		return rewriteValuegeneric_OpCvt64to32F(v)
	case OpCvt64to64F:
		return rewriteValuegeneric_OpCvt64to64F(v)
	case OpCvtBoolToUint8:
		return rewriteValuegeneric_OpCvtBoolToUint8(v)
	case OpDiv16:
		return rewriteValuegeneric_OpDiv16(v)
	case OpDiv16u:
		return rewriteValuegeneric_OpDiv16u(v)
	case OpDiv32:
		return rewriteValuegeneric_OpDiv32(v)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F(v)
	case OpDiv32u:
		return rewriteValuegeneric_OpDiv32u(v)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64(v)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F(v)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u(v)
	case OpDiv8:
		return rewriteValuegeneric_OpDiv8(v)
	case OpDiv8u:
		return rewriteValuegeneric_OpDiv8u(v)
	case OpEq16:
		return rewriteValuegeneric_OpEq16(v)
	case OpEq32:
		return rewriteValuegeneric_OpEq32(v)
	case OpEq32F:
		return rewriteValuegeneric_OpEq32F(v)
	case OpEq64:
		return rewriteValuegeneric_OpEq64(v)
	case OpEq64F:
		return rewriteValuegeneric_OpEq64F(v)
	case OpEq8:
		return rewriteValuegeneric_OpEq8(v)
	case OpEqB:
		return rewriteValuegeneric_OpEqB(v)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter(v)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr(v)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice(v)
	case OpIMake:
		return rewriteValuegeneric_OpIMake(v)
	case OpInterCall:
		return rewriteValuegeneric_OpInterCall(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValuegeneric_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16(v)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U(v)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32(v)
	case OpLeq32F:
		return rewriteValuegeneric_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U(v)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64(v)
	case OpLeq64F:
		return rewriteValuegeneric_OpLeq64F(v)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U(v)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8(v)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U(v)
	case OpLess16:
		return rewriteValuegeneric_OpLess16(v)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U(v)
	case OpLess32:
		return rewriteValuegeneric_OpLess32(v)
	case OpLess32F:
		return rewriteValuegeneric_OpLess32F(v)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U(v)
	case OpLess64:
		return rewriteValuegeneric_OpLess64(v)
	case OpLess64F:
		return rewriteValuegeneric_OpLess64F(v)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U(v)
	case OpLess8:
		return rewriteValuegeneric_OpLess8(v)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U(v)
	case OpLoad:
		return rewriteValuegeneric_OpLoad(v)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8(v)
	case OpMod16:
		return rewriteValuegeneric_OpMod16(v)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u(v)
	case OpMod32:
		return rewriteValuegeneric_OpMod32(v)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u(v)
	case OpMod64:
		return rewriteValuegeneric_OpMod64(v)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u(v)
	case OpMod8:
		return rewriteValuegeneric_OpMod8(v)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u(v)
	case OpMove:
		return rewriteValuegeneric_OpMove(v)
	case OpMul16:
		return rewriteValuegeneric_OpMul16(v)
	case OpMul32:
		return rewriteValuegeneric_OpMul32(v)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F(v)
	case OpMul64:
		return rewriteValuegeneric_OpMul64(v)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F(v)
	case OpMul8:
		return rewriteValuegeneric_OpMul8(v)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16(v)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32(v)
	case OpNeg32F:
		return rewriteValuegeneric_OpNeg32F(v)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64(v)
	case OpNeg64F:
		return rewriteValuegeneric_OpNeg64F(v)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8(v)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16(v)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32(v)
	case OpNeq32F:
		return rewriteValuegeneric_OpNeq32F(v)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64(v)
	case OpNeq64F:
		return rewriteValuegeneric_OpNeq64F(v)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8(v)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB(v)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter(v)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr(v)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice(v)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck(v)
	case OpNot:
		return rewriteValuegeneric_OpNot(v)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr(v)
	case OpOr16:
		return rewriteValuegeneric_OpOr16(v)
	case OpOr32:
		return rewriteValuegeneric_OpOr32(v)
	case OpOr64:
		return rewriteValuegeneric_OpOr64(v)
	case OpOr8:
		return rewriteValuegeneric_OpOr8(v)
	case OpOrB:
		return rewriteValuegeneric_OpOrB(v)
	case OpPhi:
		return rewriteValuegeneric_OpPhi(v)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex(v)
	case OpRotateLeft16:
		return rewriteValuegeneric_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValuegeneric_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValuegeneric_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValuegeneric_OpRotateLeft8(v)
	case OpRound32F:
		return rewriteValuegeneric_OpRound32F(v)
	case OpRound64F:
		return rewriteValuegeneric_OpRound64F(v)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValuegeneric_OpSelect0(v)
	case OpSelect1:
		return rewriteValuegeneric_OpSelect1(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32(v)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64(v)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64(v)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16(v)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32(v)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64(v)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap(v)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen(v)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr(v)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask(v)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt(v)
	case OpStaticCall:
		return rewriteValuegeneric_OpStaticCall(v)
	case OpStore:
		return rewriteValuegeneric_OpStore(v)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen(v)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr(v)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect(v)
	case OpSub16:
		return rewriteValuegeneric_OpSub16(v)
	case OpSub32:
		return rewriteValuegeneric_OpSub32(v)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F(v)
	case OpSub64:
		return rewriteValuegeneric_OpSub64(v)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F(v)
	case OpSub8:
		return rewriteValuegeneric_OpSub8(v)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8(v)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16(v)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8(v)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16(v)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32(v)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8(v)
	case OpXor16:
		return rewriteValuegeneric_OpXor16(v)
	case OpXor32:
		return rewriteValuegeneric_OpXor32(v)
	case OpXor64:
		return rewriteValuegeneric_OpXor64(v)
	case OpXor8:
		return rewriteValuegeneric_OpXor8(v)
	case OpZero:
		return rewriteValuegeneric_OpZero(v)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32(v)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64(v)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64(v)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16(v)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32(v)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64(v)
	}
	// No generic rewrite rule exists for this op.
	return false
}
// rewriteValuegeneric_OpAdd16 applies the generic rewrite rules for Add16
// (constant folding, x+0 elimination, reassociation of constants outward)
// and reports whether v was rewritten.
//
// Commutativity idiom used throughout: the inner
// `for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0` loop tries both
// argument orders by swapping v_0/v_1 in the loop's post-statement. The loop
// body either rewrites v and returns, or `continue`s; when the loop finishes
// normally (two iterations, two swaps) v_0/v_1 are back in their original
// order for the next match block.
func rewriteValuegeneric_OpAdd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			// Fold the two constants; int16 addition wraps, matching Add16 semantics.
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul16 {
				continue
			}
			_ = v_0.Args[1] // bounds hint for the two indexed loads below
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// Mul16 is also commutative: try both orders of its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul16 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					// Both products must share the same first factor x.
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul16)
					v0 := b.NewValue0(v.Pos, OpAdd16, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add16 (Const16 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add16 (Const16 [1]) (Com16 x))
	// result: (Neg16 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 1 || v_1.Op != OpCom16 {
				continue
			}
			x := v_1.Args[0]
			// Two's complement identity: ^x + 1 == -x.
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Add16 <t> z x))
	// Reassociation: float the constant i toward the top so later rules can fold it.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The non-constant guard prevents this rule from firing forever
				// (the constant+constant case is handled by folding above).
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpAdd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub16 {
				continue
			}
			// Sub16 is not commutative: args are matched positionally.
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst16 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst16 && x.Op != OpConst16) {
				continue
			}
			v.reset(OpAdd16)
			v0 := b.NewValue0(v.Pos, OpSub16, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// result: (Add16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAdd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				// Both constants must have the identical type t to merge.
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// result: (Sub16 (Const16 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpSub16 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst16 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt16(v_1_0.AuxInt)
			v.reset(OpSub16)
			v0 := b.NewValue0(v.Pos, OpConst16, t)
			v0.AuxInt = int16ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// No rule matched.
	return false
}
// rewriteValuegeneric_OpAdd32 applies the generic rewrite rules for Add32
// (constant folding, x+0 elimination, reassociation of constants outward)
// and reports whether v was rewritten. Same rule set as the Add16 variant,
// instantiated for 32-bit values.
//
// Commutativity idiom: the inner loops with post-statement
// `_i0, v_0, v_1 = _i0+1, v_1, v_0` try both argument orders by swapping
// v_0/v_1 in place; after a full (two-iteration) loop the pair is restored
// to its original order for the next match block.
func rewriteValuegeneric_OpAdd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			// Fold the two constants; int32 addition wraps, matching Add32 semantics.
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul32 {
				continue
			}
			_ = v_0.Args[1] // bounds hint for the two indexed loads below
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// Mul32 is also commutative: try both orders of its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul32 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					// Both products must share the same first factor x.
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul32)
					v0 := b.NewValue0(v.Pos, OpAdd32, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add32 (Const32 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add32 (Const32 [1]) (Com32 x))
	// result: (Neg32 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 1 || v_1.Op != OpCom32 {
				continue
			}
			x := v_1.Args[0]
			// Two's complement identity: ^x + 1 == -x.
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	// Reassociation: float the constant i toward the top so later rules can fold it.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The non-constant guard prevents this rule from firing forever
				// (the constant+constant case is handled by folding above).
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpAdd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub32 {
				continue
			}
			// Sub32 is not commutative: args are matched positionally.
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst32 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst32 && x.Op != OpConst32) {
				continue
			}
			v.reset(OpAdd32)
			v0 := b.NewValue0(v.Pos, OpSub32, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// result: (Add32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAdd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				// Both constants must have the identical type t to merge.
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
	// result: (Sub32 (Const32 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpSub32 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst32 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt32(v_1_0.AuxInt)
			v.reset(OpSub32)
			v0 := b.NewValue0(v.Pos, OpConst32, t)
			v0.AuxInt = int32ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// No rule matched.
	return false
}
// rewriteValuegeneric_OpAdd32F folds Add32F of two float32 constants and
// reports whether v was rewritten. Unlike the integer variants there is
// only the one rule: float addition cannot be freely reassociated.
func rewriteValuegeneric_OpAdd32F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Add32F (Const32F [c]) (Const32F [d]))
	// cond: c+d == c+d
	// result: (Const32F [c+d])
	for {
		// Commutative match: try both argument orders (swap in the post-statement).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32F {
				continue
			}
			c := auxIntToFloat32(v_0.AuxInt)
			if v_1.Op != OpConst32F {
				continue
			}
			d := auxIntToFloat32(v_1.AuxInt)
			// c+d == c+d is false only when c+d is NaN (IEEE 754: NaN != NaN),
			// so this guard skips folding when the sum is NaN.
			if !(c+d == c+d) {
				continue
			}
			v.reset(OpConst32F)
			v.AuxInt = float32ToAuxInt(c + d)
			return true
		}
		break
	}
	// No rule matched.
	return false
}
// rewriteValuegeneric_OpAdd64 applies the generic rewrite rules for Add64
// (constant folding, x+0 elimination, reassociation of constants outward)
// and reports whether v was rewritten. Same rule set as the Add16/Add32
// variants, instantiated for 64-bit values.
//
// Commutativity idiom: the inner loops with post-statement
// `_i0, v_0, v_1 = _i0+1, v_1, v_0` try both argument orders by swapping
// v_0/v_1 in place; after a full (two-iteration) loop the pair is restored
// to its original order for the next match block.
func rewriteValuegeneric_OpAdd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c+d])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			// Fold the two constants; int64 addition wraps, matching Add64 semantics.
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c + d)
			return true
		}
		break
	}
	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpMul64 {
				continue
			}
			_ = v_0.Args[1] // bounds hint for the two indexed loads below
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			// Mul64 is also commutative: try both orders of its operands.
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				x := v_0_0
				y := v_0_1
				if v_1.Op != OpMul64 {
					continue
				}
				_ = v_1.Args[1]
				v_1_0 := v_1.Args[0]
				v_1_1 := v_1.Args[1]
				for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 {
					// Both products must share the same first factor x.
					if x != v_1_0 {
						continue
					}
					z := v_1_1
					v.reset(OpMul64)
					v0 := b.NewValue0(v.Pos, OpAdd64, t)
					v0.AddArg2(y, z)
					v.AddArg2(x, v0)
					return true
				}
			}
		}
		break
	}
	// match: (Add64 (Const64 [0]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (Add64 (Const64 [1]) (Com64 x))
	// result: (Neg64 x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 1 || v_1.Op != OpCom64 {
				continue
			}
			x := v_1.Args[0]
			// Two's complement identity: ^x + 1 == -x.
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	// Reassociation: float the constant i toward the top so later rules can fold it.
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAdd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				// The non-constant guard prevents this rule from firing forever
				// (the constant+constant case is handled by folding above).
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpAdd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpSub64 {
				continue
			}
			// Sub64 is not commutative: args are matched positionally.
			z := v_0.Args[1]
			i := v_0.Args[0]
			if i.Op != OpConst64 {
				continue
			}
			t := i.Type
			x := v_1
			if !(z.Op != OpConst64 && x.Op != OpConst64) {
				continue
			}
			v.reset(OpAdd64)
			v0 := b.NewValue0(v.Pos, OpSub64, t)
			v0.AddArg2(x, z)
			v.AddArg2(i, v0)
			return true
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAdd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				// Both constants must have the identical type t to merge.
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAdd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c + d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
	// result: (Sub64 (Const64 <t> [c+d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpSub64 {
				continue
			}
			x := v_1.Args[1]
			v_1_0 := v_1.Args[0]
			if v_1_0.Op != OpConst64 || v_1_0.Type != t {
				continue
			}
			d := auxIntToInt64(v_1_0.AuxInt)
			v.reset(OpSub64)
			v0 := b.NewValue0(v.Pos, OpConst64, t)
			v0.AuxInt = int64ToAuxInt(c + d)
			v.AddArg2(v0, x)
			return true
		}
		break
	}
	// No rule matched.
	return false
}
// rewriteValuegeneric_OpAdd64F folds Add64F of two float64 constants and
// reports whether v was rewritten. Unlike the integer variants there is
// only the one rule: float addition cannot be freely reassociated.
func rewriteValuegeneric_OpAdd64F(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Add64F (Const64F [c]) (Const64F [d]))
	// cond: c+d == c+d
	// result: (Const64F [c+d])
	for {
		// Commutative match: try both argument orders (swap in the post-statement).
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64F {
				continue
			}
			c := auxIntToFloat64(v_0.AuxInt)
			if v_1.Op != OpConst64F {
				continue
			}
			d := auxIntToFloat64(v_1.AuxInt)
			// c+d == c+d is false only when c+d is NaN (IEEE 754: NaN != NaN),
			// so this guard skips folding when the sum is NaN.
			if !(c+d == c+d) {
				continue
			}
			v.reset(OpConst64F)
			v.AuxInt = float64ToAuxInt(c + d)
			return true
		}
		break
	}
	// No rule matched.
	return false
}
| func rewriteValuegeneric_OpAdd8(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| b := v.Block |
| // match: (Add8 (Const8 [c]) (Const8 [d])) |
| // result: (Const8 [c+d]) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1.AuxInt) |
| v.reset(OpConst8) |
| v.AuxInt = int8ToAuxInt(c + d) |
| return true |
| } |
| break |
| } |
| // match: (Add8 <t> (Mul8 x y) (Mul8 x z)) |
| // result: (Mul8 x (Add8 <t> y z)) |
| for { |
| t := v.Type |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpMul8 { |
| continue |
| } |
| _ = v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| v_0_1 := v_0.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 { |
| x := v_0_0 |
| y := v_0_1 |
| if v_1.Op != OpMul8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i2 := 0; _i2 <= 1; _i2, v_1_0, v_1_1 = _i2+1, v_1_1, v_1_0 { |
| if x != v_1_0 { |
| continue |
| } |
| z := v_1_1 |
| v.reset(OpMul8) |
| v0 := b.NewValue0(v.Pos, OpAdd8, t) |
| v0.AddArg2(y, z) |
| v.AddArg2(x, v0) |
| return true |
| } |
| } |
| } |
| break |
| } |
| // match: (Add8 (Const8 [0]) x) |
| // result: x |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { |
| continue |
| } |
| x := v_1 |
| v.copyOf(x) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Const8 [1]) (Com8 x)) |
| // result: (Neg8 x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 1 || v_1.Op != OpCom8 { |
| continue |
| } |
| x := v_1.Args[0] |
| v.reset(OpNeg8) |
| v.AddArg(x) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Add8 i:(Const8 <t>) z) x) |
| // cond: (z.Op != OpConst8 && x.Op != OpConst8) |
| // result: (Add8 i (Add8 <t> z x)) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpAdd8 { |
| continue |
| } |
| _ = v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| v_0_1 := v_0.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 { |
| i := v_0_0 |
| if i.Op != OpConst8 { |
| continue |
| } |
| t := i.Type |
| z := v_0_1 |
| x := v_1 |
| if !(z.Op != OpConst8 && x.Op != OpConst8) { |
| continue |
| } |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpAdd8, t) |
| v0.AddArg2(z, x) |
| v.AddArg2(i, v0) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Sub8 i:(Const8 <t>) z) x) |
| // cond: (z.Op != OpConst8 && x.Op != OpConst8) |
| // result: (Add8 i (Sub8 <t> x z)) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpSub8 { |
| continue |
| } |
| z := v_0.Args[1] |
| i := v_0.Args[0] |
| if i.Op != OpConst8 { |
| continue |
| } |
| t := i.Type |
| x := v_1 |
| if !(z.Op != OpConst8 && x.Op != OpConst8) { |
| continue |
| } |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpSub8, t) |
| v0.AddArg2(x, z) |
| v.AddArg2(i, v0) |
| return true |
| } |
| break |
| } |
| // match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) |
| // result: (Add8 (Const8 <t> [c+d]) x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| t := v_0.Type |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpAdd8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| v_1_1 := v_1.Args[1] |
| for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 { |
| if v_1_0.Op != OpConst8 || v_1_0.Type != t { |
| continue |
| } |
| d := auxIntToInt8(v_1_0.AuxInt) |
| x := v_1_1 |
| v.reset(OpAdd8) |
| v0 := b.NewValue0(v.Pos, OpConst8, t) |
| v0.AuxInt = int8ToAuxInt(c + d) |
| v.AddArg2(v0, x) |
| return true |
| } |
| } |
| break |
| } |
| // match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x)) |
| // result: (Sub8 (Const8 <t> [c+d]) x) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpConst8 { |
| continue |
| } |
| t := v_0.Type |
| c := auxIntToInt8(v_0.AuxInt) |
| if v_1.Op != OpSub8 { |
| continue |
| } |
| x := v_1.Args[1] |
| v_1_0 := v_1.Args[0] |
| if v_1_0.Op != OpConst8 || v_1_0.Type != t { |
| continue |
| } |
| d := auxIntToInt8(v_1_0.AuxInt) |
| v.reset(OpSub8) |
| v0 := b.NewValue0(v.Pos, OpConst8, t) |
| v0.AuxInt = int8ToAuxInt(c + d) |
| v.AddArg2(v0, x) |
| return true |
| } |
| break |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpAddPtr(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| // match: (AddPtr <t> x (Const64 [c])) |
| // result: (OffPtr <t> x [c]) |
| for { |
| t := v.Type |
| x := v_0 |
| if v_1.Op != OpConst64 { |
| break |
| } |
| c := auxIntToInt64(v_1.AuxInt) |
| v.reset(OpOffPtr) |
| v.Type = t |
| v.AuxInt = int64ToAuxInt(c) |
| v.AddArg(x) |
| return true |
| } |
| // match: (AddPtr <t> x (Const32 [c])) |
| // result: (OffPtr <t> x [int64(c)]) |
| for { |
| t := v.Type |
| x := v_0 |
| if v_1.Op != OpConst32 { |
| break |
| } |
| c := auxIntToInt32(v_1.AuxInt) |
| v.reset(OpOffPtr) |
| v.Type = t |
| v.AuxInt = int64ToAuxInt(int64(c)) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
// rewriteValuegeneric_OpAnd16 applies the generic rewrite rules for And16
// values, in rule order. It returns true if v was rewritten.
func rewriteValuegeneric_OpAnd16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And16 (Const16 [c]) (Const16 [d]))
	// result: (Const16 [c&d])
	for {
		// And16 is commutative: the _i0 loop swaps v_0/v_1 to try both
		// argument orders. The same idiom recurs throughout this function.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpConst16 {
				continue
			}
			d := auxIntToInt16(v_1.AuxInt)
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
	// cond: c >= int64(16-ntz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpRsh16Ux64 {
				continue
			}
			// Touch Args[1] up front — presumably a generated bounds
			// hint for the indexed loads below; confirm against rulegen.
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-ntz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
	// cond: c >= int64(16-nlz16(m))
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			m := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpLsh16x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(16-nlz16(m))) {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And16 (Const16 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And16 (Const16 [0]) _)
	// result: (Const16 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst16)
			v.AuxInt = int16ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And16 x (And16 x y))
	// result: (And16 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd16)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociation: float the constant outward so later rules can fold it.
	// match: (And16 (And16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd16 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst16 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst16 && x.Op != OpConst16) {
					continue
				}
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpAnd16, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// Fold two constants that reassociation has brought together.
	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
	// result: (And16 (Const16 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst16 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt16(v_0.AuxInt)
			if v_1.Op != OpAnd16 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst16 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt16(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd16)
				v0 := b.NewValue0(v.Pos, OpConst16, t)
				v0.AuxInt = int16ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd32 applies the generic rewrite rules for And32
// values, in rule order. It returns true if v was rewritten.
func rewriteValuegeneric_OpAnd32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And32 (Const32 [c]) (Const32 [d]))
	// result: (Const32 [c&d])
	for {
		// And32 is commutative: the _i0 loop swaps v_0/v_1 to try both
		// argument orders. The same idiom recurs throughout this function.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpConst32 {
				continue
			}
			d := auxIntToInt32(v_1.AuxInt)
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
	// cond: c >= int64(32-ntz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpRsh32Ux64 {
				continue
			}
			// Touch Args[1] up front — presumably a generated bounds
			// hint for the indexed loads below; confirm against rulegen.
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-ntz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
	// cond: c >= int64(32-nlz32(m))
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			m := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpLsh32x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(32-nlz32(m))) {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And32 (Const32 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And32 (Const32 [0]) _)
	// result: (Const32 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst32)
			v.AuxInt = int32ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And32 x (And32 x y))
	// result: (And32 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd32)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociation: float the constant outward so later rules can fold it.
	// match: (And32 (And32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd32 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst32 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst32 && x.Op != OpConst32) {
					continue
				}
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpAnd32, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// Fold two constants that reassociation has brought together.
	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
	// result: (And32 (Const32 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst32 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt32(v_0.AuxInt)
			if v_1.Op != OpAnd32 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst32 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt32(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd32)
				v0 := b.NewValue0(v.Pos, OpConst32, t)
				v0.AuxInt = int32ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd64 applies the generic rewrite rules for And64
// values, in rule order. It returns true if v was rewritten.
func rewriteValuegeneric_OpAnd64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And64 (Const64 [c]) (Const64 [d]))
	// result: (Const64 [c&d])
	for {
		// And64 is commutative: the _i0 loop swaps v_0/v_1 to try both
		// argument orders. The same idiom recurs throughout this function.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpConst64 {
				continue
			}
			d := auxIntToInt64(v_1.AuxInt)
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
	// cond: c >= int64(64-ntz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpRsh64Ux64 {
				continue
			}
			// Touch Args[1] up front — presumably a generated bounds
			// hint for the indexed loads below; confirm against rulegen.
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(64-ntz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
	// cond: c >= int64(64-nlz64(m))
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			m := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpLsh64x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(64-nlz64(m))) {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And64 (Const64 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And64 (Const64 [0]) _)
	// result: (Const64 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst64)
			v.AuxInt = int64ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And64 x (And64 x y))
	// result: (And64 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd64)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociation: float the constant outward so later rules can fold it.
	// match: (And64 (And64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd64 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst64 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst64 && x.Op != OpConst64) {
					continue
				}
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpAnd64, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// Fold two constants that reassociation has brought together.
	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst64 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt64(v_0.AuxInt)
			if v_1.Op != OpAnd64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst64 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt64(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd64)
				v0 := b.NewValue0(v.Pos, OpConst64, t)
				v0.AuxInt = int64ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpAnd8 applies the generic rewrite rules for And8
// values, in rule order. It returns true if v was rewritten.
func rewriteValuegeneric_OpAnd8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (And8 (Const8 [c]) (Const8 [d]))
	// result: (Const8 [c&d])
	for {
		// And8 is commutative: the _i0 loop swaps v_0/v_1 to try both
		// argument orders. The same idiom recurs throughout this function.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpConst8 {
				continue
			}
			d := auxIntToInt8(v_1.AuxInt)
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(c & d)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
	// cond: c >= int64(8-ntz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpRsh8Ux64 {
				continue
			}
			// Touch Args[1] up front — presumably a generated bounds
			// hint for the indexed loads below; confirm against rulegen.
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(8-ntz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
	// cond: c >= int64(8-nlz8(m))
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			m := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpLsh8x64 {
				continue
			}
			_ = v_1.Args[1]
			v_1_1 := v_1.Args[1]
			if v_1_1.Op != OpConst64 {
				continue
			}
			c := auxIntToInt64(v_1_1.AuxInt)
			if !(c >= int64(8-nlz8(m))) {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x x)
	// result: x
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (And8 (Const8 [-1]) x)
	// result: x
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
				continue
			}
			x := v_1
			v.copyOf(x)
			return true
		}
		break
	}
	// match: (And8 (Const8 [0]) _)
	// result: (Const8 [0])
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
				continue
			}
			v.reset(OpConst8)
			v.AuxInt = int8ToAuxInt(0)
			return true
		}
		break
	}
	// match: (And8 x (And8 x y))
	// result: (And8 x y)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if x != v_1_0 {
					continue
				}
				y := v_1_1
				v.reset(OpAnd8)
				v.AddArg2(x, y)
				return true
			}
		}
		break
	}
	// Reassociation: float the constant outward so later rules can fold it.
	// match: (And8 (And8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpAnd8 {
				continue
			}
			_ = v_0.Args[1]
			v_0_0 := v_0.Args[0]
			v_0_1 := v_0.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
				i := v_0_0
				if i.Op != OpConst8 {
					continue
				}
				t := i.Type
				z := v_0_1
				x := v_1
				if !(z.Op != OpConst8 && x.Op != OpConst8) {
					continue
				}
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpAnd8, t)
				v0.AddArg2(z, x)
				v.AddArg2(i, v0)
				return true
			}
		}
		break
	}
	// Fold two constants that reassociation has brought together.
	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
	// result: (And8 (Const8 <t> [c&d]) x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpConst8 {
				continue
			}
			t := v_0.Type
			c := auxIntToInt8(v_0.AuxInt)
			if v_1.Op != OpAnd8 {
				continue
			}
			_ = v_1.Args[1]
			v_1_0 := v_1.Args[0]
			v_1_1 := v_1.Args[1]
			for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
				if v_1_0.Op != OpConst8 || v_1_0.Type != t {
					continue
				}
				d := auxIntToInt8(v_1_0.AuxInt)
				x := v_1_1
				v.reset(OpAnd8)
				v0 := b.NewValue0(v.Pos, OpConst8, t)
				v0.AuxInt = int8ToAuxInt(c & d)
				v.AddArg2(v0, x)
				return true
			}
		}
		break
	}
	return false
}
| func rewriteValuegeneric_OpAndB(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| b := v.Block |
| // match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d]))) |
| // cond: d >= c |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d]))) |
| // cond: d >= c |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d]))) |
| // cond: d >= c |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d]))) |
| // cond: d >= c |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d]))) |
| // cond: d >= c |
| // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLess16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d]))) |
| // cond: d >= c |
| // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLeq16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d]))) |
| // cond: d >= c |
| // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLess8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLess8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d]))) |
| // cond: d >= c |
| // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLeq8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c) { |
| continue |
| } |
| v.reset(OpLeq8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLeq32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLess16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess16 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLeq16 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLeq16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLess8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLess8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d]))) |
| // cond: d >= c+1 && c+1 > c |
| // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess8 { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLeq8 { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(d >= c+1 && c+1 > c) { |
| continue |
| } |
| v.reset(OpLeq8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d]))) |
| // cond: uint64(d) >= uint64(c) |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(uint64(d) >= uint64(c)) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d]))) |
| // cond: uint64(d) >= uint64(c) |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq64U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(uint64(d) >= uint64(c)) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d]))) |
| // cond: uint32(d) >= uint32(c) |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(uint32(d) >= uint32(c)) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d]))) |
| // cond: uint32(d) >= uint32(c) |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq32U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(uint32(d) >= uint32(c)) { |
| continue |
| } |
| v.reset(OpLeq32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d]))) |
| // cond: uint16(d) >= uint16(c) |
| // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLess16U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(uint16(d) >= uint16(c)) { |
| continue |
| } |
| v.reset(OpLess16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d]))) |
| // cond: uint16(d) >= uint16(c) |
| // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq16U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLeq16U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(uint16(d) >= uint16(c)) { |
| continue |
| } |
| v.reset(OpLeq16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d]))) |
| // cond: uint8(d) >= uint8(c) |
| // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLess8U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(uint8(d) >= uint8(c)) { |
| continue |
| } |
| v.reset(OpLess8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d]))) |
| // cond: uint8(d) >= uint8(c) |
| // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLeq8U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLeq8U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(uint8(d) >= uint8(c)) { |
| continue |
| } |
| v.reset(OpLeq8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d]))) |
| // cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c) |
| // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLess64U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) { |
| continue |
| } |
| v.reset(OpLess64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d]))) |
| // cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c) |
| // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess64U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst64 { |
| continue |
| } |
| c := auxIntToInt64(v_0_0.AuxInt) |
| if v_1.Op != OpLeq64U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst64 { |
| continue |
| } |
| d := auxIntToInt64(v_1_1.AuxInt) |
| if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) { |
| continue |
| } |
| v.reset(OpLeq64U) |
| v0 := b.NewValue0(v.Pos, OpSub64, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v1.AuxInt = int64ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst64, x.Type) |
| v2.AuxInt = int64ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d]))) |
| // cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c) |
| // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLess32U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) { |
| continue |
| } |
| v.reset(OpLess32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d]))) |
| // cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c) |
| // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess32U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst32 { |
| continue |
| } |
| c := auxIntToInt32(v_0_0.AuxInt) |
| if v_1.Op != OpLeq32U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst32 { |
| continue |
| } |
| d := auxIntToInt32(v_1_1.AuxInt) |
| if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) { |
| continue |
| } |
| v.reset(OpLeq32U) |
| v0 := b.NewValue0(v.Pos, OpSub32, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v1.AuxInt = int32ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst32, x.Type) |
| v2.AuxInt = int32ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d]))) |
| // cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c) |
| // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess16U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLess16U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) { |
| continue |
| } |
| v.reset(OpLess16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d]))) |
| // cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c) |
| // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess16U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst16 { |
| continue |
| } |
| c := auxIntToInt16(v_0_0.AuxInt) |
| if v_1.Op != OpLeq16U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst16 { |
| continue |
| } |
| d := auxIntToInt16(v_1_1.AuxInt) |
| if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) { |
| continue |
| } |
| v.reset(OpLeq16U) |
| v0 := b.NewValue0(v.Pos, OpSub16, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v1.AuxInt = int16ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst16, x.Type) |
| v2.AuxInt = int16ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d]))) |
| // cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c) |
| // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess8U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLess8U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) { |
| continue |
| } |
| v.reset(OpLess8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| // match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d]))) |
| // cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c) |
| // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1])) |
| for { |
| for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { |
| if v_0.Op != OpLess8U { |
| continue |
| } |
| x := v_0.Args[1] |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpConst8 { |
| continue |
| } |
| c := auxIntToInt8(v_0_0.AuxInt) |
| if v_1.Op != OpLeq8U { |
| continue |
| } |
| _ = v_1.Args[1] |
| if x != v_1.Args[0] { |
| continue |
| } |
| v_1_1 := v_1.Args[1] |
| if v_1_1.Op != OpConst8 { |
| continue |
| } |
| d := auxIntToInt8(v_1_1.AuxInt) |
| if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) { |
| continue |
| } |
| v.reset(OpLeq8U) |
| v0 := b.NewValue0(v.Pos, OpSub8, x.Type) |
| v1 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v1.AuxInt = int8ToAuxInt(c + 1) |
| v0.AddArg2(x, v1) |
| v2 := b.NewValue0(v.Pos, OpConst8, x.Type) |
| v2.AuxInt = int8ToAuxInt(d - c - 1) |
| v.AddArg2(v0, v2) |
| return true |
| } |
| break |
| } |
| return false |
| } |
// rewriteValuegeneric_OpArraySelect applies generic rewrite rules to an
// ArraySelect value. It reports whether v was rewritten.
// NOTE: generated from gen/generic.rules; rules are tried in order and the
// first match wins, so the ordering below is significant.
func rewriteValuegeneric_OpArraySelect(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ArraySelect (ArrayMake1 x))
	// result: x
	// Selecting the sole element of a one-element array is the identity.
	for {
		if v_0.Op != OpArrayMake1 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (ArraySelect [0] (IData x))
	// result: (IData x)
	for {
		if auxIntToInt64(v.AuxInt) != 0 || v_0.Op != OpIData {
			break
		}
		x := v_0.Args[0]
		v.reset(OpIData)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom16 applies generic rewrite rules to a Com16
// (16-bit bitwise complement) value. It reports whether v was rewritten.
func rewriteValuegeneric_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com16 (Com16 x))
	// result: x
	// Double complement is the identity.
	for {
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com16 (Const16 [c]))
	// result: (Const16 [^c])
	// Constant-fold the complement.
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(^c)
		return true
	}
	// match: (Com16 (Add16 (Const16 [-1]) x))
	// result: (Neg16 x)
	// Valid because ^(x-1) == -x in two's complement.
	for {
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add16 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst16 || auxIntToInt16(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg16)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom32 applies generic rewrite rules to a Com32
// (32-bit bitwise complement) value. It reports whether v was rewritten.
func rewriteValuegeneric_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com32 (Com32 x))
	// result: x
	// Double complement is the identity.
	for {
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com32 (Const32 [c]))
	// result: (Const32 [^c])
	// Constant-fold the complement.
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(^c)
		return true
	}
	// match: (Com32 (Add32 (Const32 [-1]) x))
	// result: (Neg32 x)
	// Valid because ^(x-1) == -x in two's complement.
	for {
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add32 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst32 || auxIntToInt32(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg32)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom64 applies generic rewrite rules to a Com64
// (64-bit bitwise complement) value. It reports whether v was rewritten.
func rewriteValuegeneric_OpCom64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com64 (Com64 x))
	// result: x
	// Double complement is the identity.
	for {
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com64 (Const64 [c]))
	// result: (Const64 [^c])
	// Constant-fold the complement.
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(^c)
		return true
	}
	// match: (Com64 (Add64 (Const64 [-1]) x))
	// result: (Neg64 x)
	// Valid because ^(x-1) == -x in two's complement.
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add64 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst64 || auxIntToInt64(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg64)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpCom8 applies generic rewrite rules to a Com8
// (8-bit bitwise complement) value. It reports whether v was rewritten.
func rewriteValuegeneric_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com8 (Com8 x))
	// result: x
	// Double complement is the identity.
	for {
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (Com8 (Const8 [c]))
	// result: (Const8 [^c])
	// Constant-fold the complement.
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(^c)
		return true
	}
	// match: (Com8 (Add8 (Const8 [-1]) x))
	// result: (Neg8 x)
	// Valid because ^(x-1) == -x in two's complement.
	for {
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add8 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConst8 || auxIntToInt8(v_0_0.AuxInt) != -1 {
				continue
			}
			x := v_0_1
			v.reset(OpNeg8)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValuegeneric_OpConstInterface lowers a ConstInterface (the zero
// interface value) into its explicit two-word representation. The rewrite is
// unconditional, so this function always returns true.
func rewriteValuegeneric_OpConstInterface(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (ConstInterface)
	// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
	// A nil interface is a nil type word plus a nil data word.
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
		v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValuegeneric_OpConstSlice lowers a ConstSlice (the zero slice value)
// into an explicit SliceMake of a nil pointer plus zero length and capacity.
// The integer constant width is chosen from the target's pointer size.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpConstSlice(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = int32ToAuxInt(0)
		// The same zero constant serves as both len and cap.
		v.AddArg3(v0, v1, v1)
		return true
	}
	// match: (ConstSlice)
	// cond: config.PtrSize == 8
	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
	for {
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = int64ToAuxInt(0)
		// The same zero constant serves as both len and cap.
		v.AddArg3(v0, v1, v1)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConstString lowers a ConstString into an explicit
// StringMake of a data pointer and a length. Empty strings use a nil data
// pointer; non-empty strings point at static string data obtained from the
// frontend (fe.StringData). The length constant's width follows the target's
// pointer size. It reports whether v was rewritten.
func rewriteValuegeneric_OpConstString(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	fe := b.Func.fe
	typ := &b.Func.Config.Types
	// match: (ConstString {str})
	// cond: config.PtrSize == 4 && str == ""
	// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 4 && str == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = int32ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 8 && str == ""
	// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 8 && str == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = int64ToAuxInt(0)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 4 && str != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const32 <typ.Int> [int32(len(str))]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 4 && str != "") {
			break
		}
		v.reset(OpStringMake)
		// Data pointer: address of the string's static data, relative to SB.
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		v0.Aux = symToAux(fe.StringData(str))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v2.AuxInt = int32ToAuxInt(int32(len(str)))
		v.AddArg2(v0, v2)
		return true
	}
	// match: (ConstString {str})
	// cond: config.PtrSize == 8 && str != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(str)} (SB)) (Const64 <typ.Int> [int64(len(str))]))
	for {
		str := auxToString(v.Aux)
		if !(config.PtrSize == 8 && str != "") {
			break
		}
		v.reset(OpStringMake)
		// Data pointer: address of the string's static data, relative to SB.
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		v0.Aux = symToAux(fe.StringData(str))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v2.AuxInt = int64ToAuxInt(int64(len(str)))
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConvert applies generic rewrite rules to a Convert
// value. Each rule requires the inner Convert's memory argument to be the
// same value as the outer Convert's memory argument (mem != v_1 rejects the
// match otherwise). It reports whether v was rewritten.
func rewriteValuegeneric_OpConvert(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// result: (AddPtr ptr off)
	for {
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add64 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConvert {
				continue
			}
			mem := v_0_0.Args[1]
			ptr := v_0_0.Args[0]
			off := v_0_1
			if mem != v_1 {
				continue
			}
			v.reset(OpAddPtr)
			v.AddArg2(ptr, off)
			return true
		}
		break
	}
	// match: (Convert (Add32 (Convert ptr mem) off) mem)
	// result: (AddPtr ptr off)
	for {
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		// Add32 is commutative: try both argument orders.
		for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
			if v_0_0.Op != OpConvert {
				continue
			}
			mem := v_0_0.Args[1]
			ptr := v_0_0.Args[0]
			off := v_0_1
			if mem != v_1 {
				continue
			}
			v.reset(OpAddPtr)
			v.AddArg2(ptr, off)
			return true
		}
		break
	}
	// match: (Convert (Convert ptr mem) mem)
	// result: ptr
	// Round-tripping through Convert with the same memory is the identity.
	for {
		if v_0.Op != OpConvert {
			break
		}
		mem := v_0.Args[1]
		ptr := v_0.Args[0]
		if mem != v_1 {
			break
		}
		v.copyOf(ptr)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz16 constant-folds Ctz16 (count trailing zeros of a
// 16-bit value) when its argument is a constant. The result constant's width
// (Const32 vs Const64) follows the target's pointer size. It reports whether
// v was rewritten.
func rewriteValuegeneric_OpCtz16(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz16 (Const16 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz16(c))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz16(c)))
		return true
	}
	// match: (Ctz16 (Const16 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz16(c))])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz16(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz32 constant-folds Ctz32 (count of trailing
// zeros) applied to a 32-bit constant. The resulting constant's width
// matches the target's pointer size.
func rewriteValuegeneric_OpCtz32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz32 (Const32 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz32(c))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz32(c)))
		return true
	}
	// match: (Ctz32 (Const32 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz32(c))])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz32(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz64 constant-folds Ctz64 (count of trailing
// zeros) applied to a 64-bit constant. The resulting constant's width
// matches the target's pointer size.
func rewriteValuegeneric_OpCtz64(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz64 (Const64 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz64(c))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz64(c)))
		return true
	}
	// match: (Ctz64 (Const64 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz64(c))])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz64(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCtz8 constant-folds Ctz8 (count of trailing zeros)
// applied to an 8-bit constant. The resulting constant's width matches the
// target's pointer size.
func rewriteValuegeneric_OpCtz8(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	// match: (Ctz8 (Const8 [c]))
	// cond: config.PtrSize == 4
	// result: (Const32 [int32(ntz8(c))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(ntz8(c)))
		return true
	}
	// match: (Ctz8 (Const8 [c]))
	// cond: config.PtrSize == 8
	// result: (Const64 [int64(ntz8(c))])
	for {
		if v_0.Op != OpConst8 {
			break
		}
		c := auxIntToInt8(v_0.AuxInt)
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(ntz8(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto32 constant-folds a float32-to-int32
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt32Fto32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto32 (Const32F [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto64 constant-folds a float32-to-int64
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt32Fto64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto64 (Const32F [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32-to-float64
// widening of a constant operand.
func rewriteValuegeneric_OpCvt32Fto64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32Fto64F (Const32F [c]))
	// result: (Const64F [float64(c)])
	for {
		if v_0.Op != OpConst32F {
			break
		}
		c := auxIntToFloat32(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(float64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32to32F constant-folds an int32-to-float32
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt32to32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32to32F (Const32 [c]))
	// result: (Const32F [float32(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(float32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32to64F constant-folds an int32-to-float64
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt32to64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt32to64F (Const32 [c]))
	// result: (Const64F [float64(c)])
	for {
		if v_0.Op != OpConst32 {
			break
		}
		c := auxIntToInt32(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(float64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64Fto32 constant-folds a float64-to-int32
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt64Fto32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64Fto32 (Const64F [c]))
	// result: (Const32 [int32(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst32)
		v.AuxInt = int32ToAuxInt(int32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64Fto32F constant-folds a float64-to-float32
// narrowing of a constant operand.
func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64Fto32F (Const64F [c]))
	// result: (Const32F [float32(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(float32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64Fto64 constant-folds a float64-to-int64
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt64Fto64(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64Fto64 (Const64F [c]))
	// result: (Const64 [int64(c)])
	for {
		if v_0.Op != OpConst64F {
			break
		}
		c := auxIntToFloat64(v_0.AuxInt)
		v.reset(OpConst64)
		v.AuxInt = int64ToAuxInt(int64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64to32F constant-folds an int64-to-float32
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt64to32F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64to32F (Const64 [c]))
	// result: (Const32F [float32(c)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst32F)
		v.AuxInt = float32ToAuxInt(float32(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64to64F constant-folds an int64-to-float64
// conversion of a constant operand.
func rewriteValuegeneric_OpCvt64to64F(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Cvt64to64F (Const64 [c]))
	// result: (Const64F [float64(c)])
	for {
		if v_0.Op != OpConst64 {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpConst64F)
		v.AuxInt = float64ToAuxInt(float64(c))
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvtBoolToUint8 constant-folds a bool-to-uint8
// conversion: false becomes Const8 [0] and true becomes Const8 [1].
func rewriteValuegeneric_OpCvtBoolToUint8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (CvtBoolToUint8 (ConstBool [false]))
	// result: (Const8 [0])
	for {
		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != false {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(0)
		return true
	}
	// match: (CvtBoolToUint8 (ConstBool [true]))
	// result: (Const8 [1])
	for {
		if v_0.Op != OpConstBool || auxIntToBool(v_0.AuxInt) != true {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int8ToAuxInt(1)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv16 strength-reduces signed 16-bit division.
// It folds divides of two constants, reduces power-of-two divisors to
// shifts, flips negative divisors to positive ones under a negation, and
// replaces remaining constant divisors with a multiply by a magic constant
// (smagic16) so no runtime divide instruction is needed.
// The rules are tried in order; the first match wins.
func rewriteValuegeneric_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// Fold a divide of two constants. d != 0 avoids folding a
	// divide-by-zero, which must still trap at runtime.
	// match: (Div16 (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [c/d])
	for {
		if v_0.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_0.AuxInt)
		if v_1.Op != OpConst16 {
			break
		}
		d := auxIntToInt16(v_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int16ToAuxInt(c / d)
		return true
	}
	// A known-nonnegative dividend divided by 2^k is just an unsigned
	// shift right by k.
	// match: (Div16 n (Const16 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo16(c)
	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)]))
	for {
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(isNonNegative(n) && isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(log16(c))
		v.AddArg2(n, v0)
		return true
	}
	// Negative divisor (other than MinInt16, whose negation overflows):
	// divide by -c and negate the result, exposing a positive constant to
	// the later rules.
	// match: (Div16 <t> n (Const16 [c]))
	// cond: c < 0 && c != -1<<15
	// result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(c < 0 && c != -1<<15) {
			break
		}
		v.reset(OpNeg16)
		v0 := b.NewValue0(v.Pos, OpDiv16, t)
		v1 := b.NewValue0(v.Pos, OpConst16, t)
		v1.AuxInt = int16ToAuxInt(-c)
		v0.AddArg2(n, v1)
		v.AddArg(v0)
		return true
	}
	// Divide by MinInt16: the quotient is 1 only when x == MinInt16 and 0
	// otherwise. (x & -x) isolates the lowest set bit; shifting it right
	// (unsigned) by 15 yields 1 exactly when that bit is bit 15, i.e.
	// x == -1<<15.
	// match: (Div16 <t> x (Const16 [-1<<15]))
	// result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 || auxIntToInt16(v_1.AuxInt) != -1<<15 {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v1.AddArg(x)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64ToAuxInt(15)
		v.AddArg2(v0, v2)
		return true
	}
	// Signed divide by 2^k: an arithmetic shift alone rounds toward
	// negative infinity, so first add a bias of 2^k-1 when n is negative.
	// (n >> 15) is 0 or all-ones; shifting that right (unsigned) by
	// 16-log16(c) produces the 0 or 2^k-1 bias.
	// match: (Div16 <t> n (Const16 [c]))
	// cond: isPowerOfTwo16(c)
	// result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [int64(16-log16(c))]))) (Const64 <typ.UInt64> [int64(log16(c))]))
	for {
		t := v.Type
		n := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(isPowerOfTwo16(c)) {
			break
		}
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(15)
		v2.AddArg2(n, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(int64(16 - log16(c)))
		v1.AddArg2(v2, v4)
		v0.AddArg2(n, v1)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(int64(log16(c)))
		v.AddArg2(v0, v5)
		return true
	}
	// General constant divisor: signed magic-number division. Multiply the
	// sign-extended x by smagic16(c).m in 32 bits, shift right by
	// 16+smagic16(c).s, then subtract x>>31 (0 or -1) to correct the
	// rounding for negative dividends.
	// match: (Div16 <t> x (Const16 [c]))
	// cond: smagicOK16(c)
	// result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(smagic16(c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic16(c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpConst16 {
			break
		}
		c := auxIntToInt16(v_1.AuxInt)
		if !(smagicOK16(c)) {
			break
		}
		v.reset(OpSub16)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int32ToAuxInt(int32(smagic16(c).m))
		v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v3.AddArg(x)
		v1.AddArg2(v2, v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(16 + smagic16(c).s)
		v0.AddArg2(v1, v4)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(31)
		// v3 (SignExt16to32 x) is deliberately reused from the multiply above.
		v5.AddArg2(v3, v6)
		v.AddArg2(v0, v5)
		return true
	}
	return false
}
| func rewriteValuegeneric_OpDiv16u(v *Value) bool { |
| v_1 := v.Args[1] |
| v_0 := v.Args[0] |
| b := v.Block |
| config := b.Func.Config |
| typ := &b.Func.Config.Types |
| // match: (Div16u (Const16 [c]) (Const16 [d])) |
| // cond: d != 0 |
| // result: (Const16 [int16(uint16(c)/uint16(d))]) |
| for { |
| if v_0.Op != OpConst16 { |
| break |
| } |
| c := auxIntToInt16(v_0.AuxInt) |
| if v_1.Op != OpConst16 { |
| break |
| } |
| d := auxIntToInt16(v_1.AuxInt) |
| if !(d != 0) { |
| break |
| } |
| v.reset(OpConst16) |
| v.AuxInt = int16ToAuxInt(int16(uint16(c) / uint16(d))) |
| return true |
| } |
| // match: (Div16u n (Const16 [c])) |
| // cond: isPowerOfTwo16(c) |
| // result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log16(c)])) |
| for { |
| n := v_0 |
| if v_1.Op != OpConst16 { |
| break |
| } |
| c := auxIntToInt16(v_1.AuxInt) |
| if !(isPowerOfTwo16(c)) { |
| break |
| } |
| v.reset(OpRsh16Ux64) |
| v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) |
| v0.AuxInt = int64ToAuxInt(log16(c)) |
| v.AddArg2(n, v0) |
| return true |
| } |
| // match: (Div16u x (Const16 [c])) |
| // cond: umagicOK16(c) && config.RegSize == 8 |
| // result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic16(c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic16(c).s]))) |
| for { |
| x := v_0 |
| if v_1.Op != OpConst16 { |
| break |
| } |
| c := auxIntToInt16(v_1.AuxInt) |
| if !(umagicOK16(c) && config.RegSize == 8) { |
| break |
| } |
| v.reset(OpTrunc64to16) |
| v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64) |
| v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64) |
| v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) |
| v2.AuxInt = int64ToAuxInt(int64(1<<16 + umagic16(c).m)) |
| v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) |
| v3.AddArg(x) |
| v1.AddArg2(v2, v3) |
| v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) |
| v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s) |
| v0.AddArg2(v1, v4) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Div16u x (Const16 [c])) |
| // cond: umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0 |
| // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+umagic16(c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic16(c).s-1]))) |
| for { |
| x := v_0 |
| if v_1.Op != OpConst16 { |
| break |
| } |
| c := auxIntToInt16(v_1.AuxInt) |
| if !(umagicOK16(c) && config.RegSize == 4 && umagic16(c).m&1 == 0) { |
| break |
| } |
| v.reset(OpTrunc32to16) |
| v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) |
| v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) |
| v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) |
| v2.AuxInt = int32ToAuxInt(int32(1<<15 + umagic16(c).m/2)) |
| v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) |
| v3.AddArg(x) |
| v1.AddArg2(v2, v3) |
| v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) |
| v4.AuxInt = int64ToAuxInt(16 + umagic16(c).s - 1) |
| v0.AddArg2(v1, v4) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Div16u x (Const16 [c])) |
| // cond: umagicOK16(c) && config.RegSize == 4 && c&1 == 0 |
| // result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int32(1<<15+(umagic16(c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic16(c).s-2]))) |
| for { |
| x := v_0 |
| if v_1.Op != OpConst16 { |
| break |
| } |
| c := auxIntToInt16(v_1.AuxInt) |
| if !(umagicOK16(c) && config.RegSize == 4 && c&1 == 0) { |
| break |
| } |
| v.reset(OpTrunc32to16) |
| v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) |
| v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32) |
| v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32) |
| v2.AuxInt = int32ToAuxInt(int32(1<<15 + (umagic16(c).m+1)/2)) |
| v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32) |
| v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) |
| v4.AddArg(x) |
| v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) |
| v5.AuxInt = int64ToAuxInt(1) |
| v3.AddArg2(v4, v5) |
|