| // autogenerated from gen/generic.rules: do not edit! |
| // generated with: cd gen; go run *.go |
| |
| package ssa |
| |
| import "math" |
| |
| var _ = math.MinInt8 // in case not otherwise used |
// rewriteValuegeneric dispatches v to the machine-independent rewrite
// function for its opcode. It reports whether some rewrite rule fired and
// mutated v in place. Opcodes without generic rules fall through and
// return false.
func rewriteValuegeneric(v *Value, config *Config) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16(v, config)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32(v, config)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F(v, config)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64(v, config)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F(v, config)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8(v, config)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr(v, config)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16(v, config)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32(v, config)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64(v, config)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8(v, config)
	case OpArg:
		return rewriteValuegeneric_OpArg(v, config)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect(v, config)
	case OpCom16:
		return rewriteValuegeneric_OpCom16(v, config)
	case OpCom32:
		return rewriteValuegeneric_OpCom32(v, config)
	case OpCom64:
		return rewriteValuegeneric_OpCom64(v, config)
	case OpCom8:
		return rewriteValuegeneric_OpCom8(v, config)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface(v, config)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice(v, config)
	case OpConstString:
		return rewriteValuegeneric_OpConstString(v, config)
	case OpConvert:
		return rewriteValuegeneric_OpConvert(v, config)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F(v, config)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F(v, config)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F(v, config)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64(v, config)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F(v, config)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u(v, config)
	case OpEq16:
		return rewriteValuegeneric_OpEq16(v, config)
	case OpEq32:
		return rewriteValuegeneric_OpEq32(v, config)
	case OpEq64:
		return rewriteValuegeneric_OpEq64(v, config)
	case OpEq8:
		return rewriteValuegeneric_OpEq8(v, config)
	case OpEqB:
		return rewriteValuegeneric_OpEqB(v, config)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter(v, config)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr(v, config)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice(v, config)
	case OpGeq16:
		return rewriteValuegeneric_OpGeq16(v, config)
	case OpGeq16U:
		return rewriteValuegeneric_OpGeq16U(v, config)
	case OpGeq32:
		return rewriteValuegeneric_OpGeq32(v, config)
	case OpGeq32U:
		return rewriteValuegeneric_OpGeq32U(v, config)
	case OpGeq64:
		return rewriteValuegeneric_OpGeq64(v, config)
	case OpGeq64U:
		return rewriteValuegeneric_OpGeq64U(v, config)
	case OpGeq8:
		return rewriteValuegeneric_OpGeq8(v, config)
	case OpGeq8U:
		return rewriteValuegeneric_OpGeq8U(v, config)
	case OpGreater16:
		return rewriteValuegeneric_OpGreater16(v, config)
	case OpGreater16U:
		return rewriteValuegeneric_OpGreater16U(v, config)
	case OpGreater32:
		return rewriteValuegeneric_OpGreater32(v, config)
	case OpGreater32U:
		return rewriteValuegeneric_OpGreater32U(v, config)
	case OpGreater64:
		return rewriteValuegeneric_OpGreater64(v, config)
	case OpGreater64U:
		return rewriteValuegeneric_OpGreater64U(v, config)
	case OpGreater8:
		return rewriteValuegeneric_OpGreater8(v, config)
	case OpGreater8U:
		return rewriteValuegeneric_OpGreater8U(v, config)
	case OpIMake:
		return rewriteValuegeneric_OpIMake(v, config)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds(v, config)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds(v, config)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16(v, config)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U(v, config)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32(v, config)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U(v, config)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64(v, config)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U(v, config)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8(v, config)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U(v, config)
	case OpLess16:
		return rewriteValuegeneric_OpLess16(v, config)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U(v, config)
	case OpLess32:
		return rewriteValuegeneric_OpLess32(v, config)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U(v, config)
	case OpLess64:
		return rewriteValuegeneric_OpLess64(v, config)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U(v, config)
	case OpLess8:
		return rewriteValuegeneric_OpLess8(v, config)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U(v, config)
	case OpLoad:
		return rewriteValuegeneric_OpLoad(v, config)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16(v, config)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32(v, config)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64(v, config)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8(v, config)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16(v, config)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32(v, config)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64(v, config)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8(v, config)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16(v, config)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32(v, config)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64(v, config)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8(v, config)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16(v, config)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32(v, config)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64(v, config)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8(v, config)
	case OpMod16:
		return rewriteValuegeneric_OpMod16(v, config)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u(v, config)
	case OpMod32:
		return rewriteValuegeneric_OpMod32(v, config)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u(v, config)
	case OpMod64:
		return rewriteValuegeneric_OpMod64(v, config)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u(v, config)
	case OpMod8:
		return rewriteValuegeneric_OpMod8(v, config)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u(v, config)
	case OpMul16:
		return rewriteValuegeneric_OpMul16(v, config)
	case OpMul32:
		return rewriteValuegeneric_OpMul32(v, config)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F(v, config)
	case OpMul64:
		return rewriteValuegeneric_OpMul64(v, config)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F(v, config)
	case OpMul8:
		return rewriteValuegeneric_OpMul8(v, config)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16(v, config)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32(v, config)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64(v, config)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8(v, config)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16(v, config)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32(v, config)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64(v, config)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8(v, config)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB(v, config)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter(v, config)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr(v, config)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice(v, config)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck(v, config)
	case OpNot:
		return rewriteValuegeneric_OpNot(v, config)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr(v, config)
	case OpOr16:
		return rewriteValuegeneric_OpOr16(v, config)
	case OpOr32:
		return rewriteValuegeneric_OpOr32(v, config)
	case OpOr64:
		return rewriteValuegeneric_OpOr64(v, config)
	case OpOr8:
		return rewriteValuegeneric_OpOr8(v, config)
	case OpPhi:
		return rewriteValuegeneric_OpPhi(v, config)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex(v, config)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16(v, config)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32(v, config)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64(v, config)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8(v, config)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16(v, config)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32(v, config)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64(v, config)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8(v, config)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16(v, config)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32(v, config)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64(v, config)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8(v, config)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16(v, config)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32(v, config)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64(v, config)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8(v, config)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16(v, config)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32(v, config)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64(v, config)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8(v, config)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16(v, config)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32(v, config)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64(v, config)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8(v, config)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16(v, config)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32(v, config)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64(v, config)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8(v, config)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16(v, config)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32(v, config)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64(v, config)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8(v, config)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32(v, config)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64(v, config)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64(v, config)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16(v, config)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32(v, config)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64(v, config)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap(v, config)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen(v, config)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr(v, config)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask(v, config)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt(v, config)
	case OpStore:
		return rewriteValuegeneric_OpStore(v, config)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen(v, config)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr(v, config)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect(v, config)
	case OpSub16:
		return rewriteValuegeneric_OpSub16(v, config)
	case OpSub32:
		return rewriteValuegeneric_OpSub32(v, config)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F(v, config)
	case OpSub64:
		return rewriteValuegeneric_OpSub64(v, config)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F(v, config)
	case OpSub8:
		return rewriteValuegeneric_OpSub8(v, config)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8(v, config)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16(v, config)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8(v, config)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16(v, config)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32(v, config)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8(v, config)
	case OpXor16:
		return rewriteValuegeneric_OpXor16(v, config)
	case OpXor32:
		return rewriteValuegeneric_OpXor32(v, config)
	case OpXor64:
		return rewriteValuegeneric_OpXor64(v, config)
	case OpXor8:
		return rewriteValuegeneric_OpXor8(v, config)
	case OpZero:
		return rewriteValuegeneric_OpZero(v, config)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32(v, config)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64(v, config)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64(v, config)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16(v, config)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32(v, config)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64(v, config)
	}
	return false
}
// rewriteValuegeneric_OpAdd16 applies the generic rewrite rules for Add16.
// Rules are tried in source order; the first match rewrites v in place and
// returns true. Returns false when no rule applies.
func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c+d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		// Fold with 16-bit wraparound, then widen back for AuxInt storage.
		v.AuxInt = int64(int16(c + d))
		return true
	}
	// match: (Add16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Add16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		// Canonicalize: put the constant operand first so the rules below
		// only need to match one operand order.
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd32 applies the generic rewrite rules for Add32.
// Rules are tried in source order; the first match rewrites v in place and
// returns true. Returns false when no rule applies.
func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c+d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		// Fold with 32-bit wraparound, then widen back for AuxInt storage.
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (Add32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Add32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		// Canonicalize: put the constant operand first so the rules below
		// only need to match one operand order.
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
| func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Add32F (Const32F [c]) (Const32F [d])) |
| // cond: |
| // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst32F { |
| break |
| } |
| c := v_0.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst32F { |
| break |
| } |
| d := v_1.AuxInt |
| v.reset(OpConst32F) |
| v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) |
| return true |
| } |
| // match: (Add32F x (Const32F [0])) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst32F { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (Add32F (Const32F [0]) x) |
| // cond: |
| // result: x |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst32F { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| x := v.Args[1] |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
// rewriteValuegeneric_OpAdd64 applies the generic rewrite rules for Add64.
// Rules are tried in source order; the first match rewrites v in place and
// returns true. Returns false when no rule applies.
func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c+d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		// AuxInt is already 64 bits wide, so no truncation step is needed.
		v.AuxInt = c + d
		return true
	}
	// match: (Add64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Add64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		// Canonicalize: put the constant operand first so the rules below
		// only need to match one operand order.
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
| func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Add64F (Const64F [c]) (Const64F [d])) |
| // cond: |
| // result: (Const64F [f2i(i2f(c) + i2f(d))]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst64F { |
| break |
| } |
| c := v_0.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64F { |
| break |
| } |
| d := v_1.AuxInt |
| v.reset(OpConst64F) |
| v.AuxInt = f2i(i2f(c) + i2f(d)) |
| return true |
| } |
| // match: (Add64F x (Const64F [0])) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64F { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (Add64F (Const64F [0]) x) |
| // cond: |
| // result: x |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst64F { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| x := v.Args[1] |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
// rewriteValuegeneric_OpAdd8 applies the generic rewrite rules for Add8.
// Rules are tried in source order; the first match rewrites v in place and
// returns true. Returns false when no rule applies.
func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Add8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c+d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		// Fold with 8-bit wraparound, then widen back for AuxInt storage.
		v.AuxInt = int64(int8(c + d))
		return true
	}
	// match: (Add8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Add8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		// Canonicalize: put the constant operand first so the rules below
		// only need to match one operand order.
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAddPtr applies the generic rewrite rule for AddPtr:
// a pointer plus a constant offset is folded into a single OffPtr value.
// It reports whether the rule fired.
func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (AddPtr <t> x (Const64 [c]))
	// cond:
	// result: (OffPtr <t> x [c])
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpOffPtr)
		// Preserve the original pointer type; the offset moves into AuxInt.
		v.Type = t
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd16 applies the generic rewrite rules for And16:
// constant canonicalization, the identities x&x = x, -1&x = x, 0&x = 0,
// and absorption of a duplicated operand inside a nested And16.
// Rules are tried in source order; the first match rewrites v in place.
func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (And16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		// Canonicalize: constant operand first, so the rules below only
		// need to match one operand order.
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And16 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And16 (Const16 [-1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// The next four rules absorb a duplicated operand in a nested And16,
	// covering both nesting sides and both operand positions.
	// match: (And16 x (And16 x y))
	// cond:
	// result: (And16 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 x (And16 y x))
	// cond:
	// result: (And16 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 (And16 x y) x)
	// cond:
	// result: (And16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 (And16 x y) y)
	// cond:
	// result: (And16 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd32 applies the generic rewrite rules for And32:
// constant canonicalization, the identities x&x = x, -1&x = x, 0&x = 0,
// and absorption of a duplicated operand inside a nested And32.
// Rules are tried in source order; the first match rewrites v in place.
func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (And32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		// Canonicalize: constant operand first, so the rules below only
		// need to match one operand order.
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And32 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And32 (Const32 [-1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And32 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// The next four rules absorb a duplicated operand in a nested And32,
	// covering both nesting sides and both operand positions.
	// match: (And32 x (And32 x y))
	// cond:
	// result: (And32 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 x (And32 y x))
	// cond:
	// result: (And32 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 (And32 x y) x)
	// cond:
	// result: (And32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 (And32 x y) y)
	// cond:
	// result: (And32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd64 applies the generic rewrite rules for And64:
// constant canonicalization, the identities x&x = x, -1&x = x, 0&x = 0,
// absorption of a duplicated operand inside a nested And64, and two rules
// that turn an AND with a contiguous bit mask into a shift pair.
// Rules are tried in source order; the first match rewrites v in place.
func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (And64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		// Canonicalize: constant operand first, so the rules below only
		// need to match one operand order.
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And64 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And64 (Const64 [-1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// The next four rules absorb a duplicated operand in a nested And64,
	// covering both nesting sides and both operand positions.
	// match: (And64 x (And64 x y))
	// cond:
	// result: (And64 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 x (And64 y x))
	// cond:
	// result: (And64 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 (And64 x y) x)
	// cond:
	// result: (And64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 (And64 x y) y)
	// cond:
	// result: (And64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// The two rules below rewrite AND with a contiguous mask as a pair of
	// shifts. The helpers nlz/nto/nlo/ntz (defined elsewhere in this
	// package) appear to count leading zeros / trailing ones / leading
	// ones / trailing zeros of the mask; the conditions presumably ensure
	// the mask is one contiguous run of ones reaching an end of the word,
	// wide enough (>=32 one-bits of slack) that the shift pair is
	// preferable to materializing the constant — TODO confirm against the
	// helper definitions.
	// match: (And64 <t> (Const64 [y]) x)
	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
	// result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		y := v_0.AuxInt
		x := v.Args[1]
		if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) {
			break
		}
		// Clear the top nlz(y) bits: (x << nlz(y)) >> nlz(y), unsigned.
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpLsh64x64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = nlz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = nlz(y)
		v.AddArg(v2)
		return true
	}
	// match: (And64 <t> (Const64 [y]) x)
	// cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32
	// result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		y := v_0.AuxInt
		x := v.Args[1]
		if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) {
			break
		}
		// Clear the bottom ntz(y) bits: (x >> ntz(y)) << ntz(y), unsigned.
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Line, OpRsh64Ux64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = ntz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = ntz(y)
		v.AddArg(v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd8 applies the generic rewrite rules for And8:
// constant canonicalization, the identities x&x = x, -1&x = x, 0&x = 0,
// and absorption of a duplicated operand inside a nested And8.
// Rules are tried in source order; the first match rewrites v in place.
func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (And8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (And8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		// Canonicalize: constant operand first, so the rules below only
		// need to match one operand order.
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And8 x x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And8 (Const8 [-1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// The next four rules absorb a duplicated operand in a nested And8,
	// covering both nesting sides and both operand positions.
	// match: (And8 x (And8 x y))
	// cond:
	// result: (And8 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 x (And8 y x))
	// cond:
	// result: (And8 x y)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 x y) x)
	// cond:
	// result: (And8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 x y) y)
	// cond:
	// result: (And8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValuegeneric_OpArg decomposes compound Arg values (strings,
// slices, interfaces, complex numbers, small SSA-able structs and
// 0/1-element arrays) into Make ops over scalar Args at the appropriate
// byte offsets. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Arg {n} [off])
	// cond: v.Type.IsString()
	// result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]))
	for {
		off := v.AuxInt
		n := v.Aux
		if !(v.Type.IsString()) {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v1.AuxInt = off + config.PtrSize
		v1.Aux = n
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsSlice()
	// result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize]))
	for {
		off := v.AuxInt
		n := v.Aux
		if !(v.Type.IsSlice()) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v1.AuxInt = off + config.PtrSize
		v1.Aux = n
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt())
		v2.AuxInt = off + 2*config.PtrSize
		v2.Aux = n
		v.AddArg(v2)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsInterface()
	// result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize]))
	for {
		off := v.AuxInt
		n := v.Aux
		if !(v.Type.IsInterface()) {
			break
		}
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr())
		v1.AuxInt = off + config.PtrSize
		v1.Aux = n
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsComplex() && v.Type.Size() == 16
	// result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8]))
	for {
		off := v.AuxInt
		n := v.Aux
		if !(v.Type.IsComplex() && v.Type.Size() == 16) {
			break
		}
		v.reset(OpComplexMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64())
		v1.AuxInt = off + 8
		v1.Aux = n
		v.AddArg(v1)
		return true
	}
	// match: (Arg {n} [off])
	// cond: v.Type.IsComplex() && v.Type.Size() == 8
	// result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4]))
	for {
		off := v.AuxInt
		n := v.Aux
		if !(v.Type.IsComplex() && v.Type.Size() == 8) {
			break
		}
		v.reset(OpComplexMake)
		v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32())
		v1.AuxInt = off + 4
		v1.Aux = n
		v.AddArg(v1)
		return true
	}
	// match: (Arg <t>)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]))
	for {
		t := v.Type
		off := v.AuxInt
		n := v.Aux
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.AuxInt = off + t.FieldOff(0)
		v0.Aux = n
		v.AddArg(v0)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]))
	for {
		t := v.Type
		off := v.AuxInt
		n := v.Aux
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.AuxInt = off + t.FieldOff(0)
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.AuxInt = off + t.FieldOff(1)
		v1.Aux = n
		v.AddArg(v1)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]))
	for {
		t := v.Type
		off := v.AuxInt
		n := v.Aux
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.AuxInt = off + t.FieldOff(0)
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.AuxInt = off + t.FieldOff(1)
		v1.Aux = n
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2))
		v2.AuxInt = off + t.FieldOff(2)
		v2.Aux = n
		v.AddArg(v2)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} [off+t.FieldOff(3)]))
	for {
		t := v.Type
		off := v.AuxInt
		n := v.Aux
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0))
		v0.AuxInt = off + t.FieldOff(0)
		v0.Aux = n
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1))
		v1.AuxInt = off + t.FieldOff(1)
		v1.Aux = n
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2))
		v2.AuxInt = off + t.FieldOff(2)
		v2.Aux = n
		v.AddArg(v2)
		v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3))
		v3.AuxInt = off + t.FieldOff(3)
		v3.Aux = n
		v.AddArg(v3)
		return true
	}
	// match: (Arg <t>)
	// cond: t.IsArray() && t.NumElem() == 0
	// result: (ArrayMake0)
	for {
		t := v.Type
		if !(t.IsArray() && t.NumElem() == 0) {
			break
		}
		v.reset(OpArrayMake0)
		return true
	}
	// match: (Arg <t> {n} [off])
	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
	// result: (ArrayMake1 (Arg <t.ElemType()> {n} [off]))
	for {
		t := v.Type
		off := v.AuxInt
		n := v.Aux
		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpArrayMake1)
		v0 := b.NewValue0(v.Line, OpArg, t.ElemType())
		v0.AuxInt = off
		v0.Aux = n
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpArraySelect simplifies ArraySelect values:
// selecting from an ArrayMake1 yields its element, and selecting index 0
// from a Load or from interface data passes the underlying value through.
// It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpArraySelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ArraySelect (ArrayMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpArrayMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ArraySelect [0] (Load ptr mem))
	// cond:
	// result: (Load ptr mem)
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpLoad {
			break
		}
		ptr := v_0.Args[0]
		mem := v_0.Args[1]
		v.reset(OpLoad)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (ArraySelect [0] x:(IData _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		if x.Op != OpIData {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom16 cancels a double bitwise complement:
// (Com16 (Com16 x)) -> x. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com16 (Com16 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom16 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom32 cancels a double bitwise complement:
// (Com32 (Com32 x)) -> x. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com32 (Com32 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom64 cancels a double bitwise complement:
// (Com64 (Com64 x)) -> x. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 (Com64 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCom8 cancels a double bitwise complement:
// (Com8 (Com8 x)) -> x. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 (Com8 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConstInterface expands a zero-valued interface
// constant into its two nil-pointer components (itab and data). This
// rewrite always applies, so the function always returns true.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstInterface)
	// cond:
	// result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>))
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v1)
		return true
	}
}
// rewriteValuegeneric_OpConstSlice expands a zero-valued slice constant
// into (SliceMake nil-ptr 0 0), using 32- or 64-bit zero constants for
// len/cap depending on config.PtrSize. It reports whether a rewrite was
// applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	// match: (ConstSlice)
	// cond: config.PtrSize == 8
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 8) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConstString expands a string constant into
// (StringMake ptr len). The empty string uses a nil pointer and zero
// length; non-empty strings take the address of their static data via
// config.fe.StringData. The length constant is 32- or 64-bit depending
// on config.PtrSize. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) != ""
	// result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr())
		v0.Aux = config.fe.StringData(s.(string))
		v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr())
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt())
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConvert simplifies Convert values: pointer
// arithmetic performed through a Convert (in either operand order of
// the inner Add64) is moved onto the raw pointer, and a Convert of a
// Convert with the same memory argument collapses to the original
// pointer. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		ptr := v_0_0.Args[0]
		mem := v_0_0.Args[1]
		off := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add64 off (Convert ptr mem)) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		off := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConvert {
			break
		}
		ptr := v_0_1.Args[0]
		mem := v_0_1.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Convert ptr mem) mem)
	// cond:
	// result: ptr
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConvert {
			break
		}
		ptr := v_0.Args[0]
		mem := v_0.Args[1]
		if mem != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = ptr.Type
		v.AddArg(ptr)
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt32Fto64F constant-folds a float32-to-float64
// conversion of a constant. The AuxInt payload is reused unchanged
// (widening a float32 constant to float64 is exact). It reports whether
// a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt32Fto64F (Const32F [c]))
	// cond:
	// result: (Const64F [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValuegeneric_OpCvt64Fto32F constant-folds a float64-to-float32
// conversion of a constant, rounding through float32 precision via
// i2f32 before re-encoding with f2i. It reports whether a rewrite was
// applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Cvt64Fto32F (Const64F [c]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c)))
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv32F simplifies 32-bit float division by the
// constants 1 (identity) and -1 (negation). It reports whether a
// rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div32F x (Const32F [f2i(1)]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		if v_1.AuxInt != f2i(1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Div32F x (Const32F [f2i(-1)]))
	// cond:
	// result: (Neg32F x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		if v_1.AuxInt != f2i(-1) {
			break
		}
		v.reset(OpNeg32F)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv64 strength-reduces signed 64-bit division
// by a constant into a signed magic-number multiply (Hmul64), shifts,
// and a sign correction, with four cases covering the signs of the
// divisor c and of the magic multiplier smagic64m(c). It reports
// whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpHmul64, t)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = smagic64m(c)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64s(c)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Line, OpRsh64x64, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = 63
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpRsh64x64, t)
		v1 := b.NewValue0(v.Line, OpAdd64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v.AddArg(v5)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpHmul64, t)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = smagic64m(c)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64s(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Line, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Line, OpConst64, t)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
	// match: (Div64 <t> x (Const64 [c]))
	// cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0
	// result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpAdd64, t)
		v3 := b.NewValue0(v.Line, OpHmul64, t)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64m(c)
		v3.AddArg(v4)
		v3.AddArg(x)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = smagic64s(c)
		v1.AddArg(v5)
		v0.AddArg(v1)
		v6 := b.NewValue0(v.Line, OpRsh64x64, t)
		v6.AddArg(x)
		v7 := b.NewValue0(v.Line, OpConst64, t)
		v7.AuxInt = 63
		v6.AddArg(v7)
		v0.AddArg(v6)
		v.AddArg(v0)
		return true
	}
	return false
}
| func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Div64F x (Const64F [f2i(1)])) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64F { |
| break |
| } |
| if v_1.AuxInt != f2i(1) { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (Div64F x (Const64F [f2i(-1)])) |
| // cond: |
| // result: (Neg32F x) |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64F { |
| break |
| } |
| if v_1.AuxInt != f2i(-1) { |
| break |
| } |
| v.reset(OpNeg32F) |
| v.AddArg(x) |
| return true |
| } |
| return false |
| } |
// rewriteValuegeneric_OpDiv64u strength-reduces unsigned 64-bit
// division by a constant: a power-of-two divisor becomes a right shift,
// and other divisors accepted by umagic64ok become an unsigned
// magic-number multiply (Hmul64u) plus shift, with an Avg64u correction
// when umagic64a(c) requires the "add" variant. It reports whether a
// rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u <t> n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
	for {
		t := v.Type
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = log2(c)
		v.AddArg(v0)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && !umagic64a(c)
	// result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && !umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpHmul64u, t)
		v1 := b.NewValue0(v.Line, OpConst64, t)
		v1.AuxInt = umagic64m(c)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64s(c)
		v.AddArg(v2)
		return true
	}
	// match: (Div64u <t> x (Const64 [c]))
	// cond: umagic64ok(c) && umagic64a(c)
	// result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagic64ok(c) && umagic64a(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Line, OpAvg64u, t)
		v1 := b.NewValue0(v.Line, OpHmul64u, t)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Line, OpConst64, t)
		v2.AuxInt = umagic64m(c)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Line, OpConst64, t)
		v3.AuxInt = umagic64s(c) - 1
		v.AddArg(v3)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEq16 simplifies 16-bit equality: x == x is
// true, a constant compared against (Add16 const x) folds into a single
// constant comparison, constants are canonicalized to the left, and a
// comparison of two constants is folded to a ConstBool. It reports
// whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq16 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Eq16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpEq16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEq32 simplifies 32-bit equality: x == x is
// true, a constant compared against (Add32 const x) folds into a single
// constant comparison, constants are canonicalized to the left, and a
// comparison of two constants is folded to a ConstBool. It reports
// whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Eq32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEq64 simplifies 64-bit equality: x == x is
// true, a constant compared against (Add64 const x) folds into a single
// constant comparison (no truncation needed at 64 bits), constants are
// canonicalized to the left, and a comparison of two constants is
// folded to a ConstBool. It reports whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Eq64 (Const64 <t> [c-d]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Eq64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpEq64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEq8 simplifies 8-bit equality: x == x is true,
// a constant compared against (Add8 const x) folds into a single
// constant comparison, constants are canonicalized to the left, and a
// comparison of two constants is folded to a ConstBool. It reports
// whether a rewrite was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Eq8 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// cond:
	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		x := v_1.Args[1]
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Eq8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpEq8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEqB simplifies boolean equality: two constants
// fold to a ConstBool, comparing against false becomes Not, and
// comparing against true is the identity. It reports whether a rewrite
// was applied.
// Generated from gen/generic.rules; edit the rules, not this function.
func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqB (ConstBool [c]) (ConstBool [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConstBool {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	// match: (EqB (ConstBool [0]) x)
	// cond:
	// result: (Not x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpNot)
		v.AddArg(x)
		return true
	}
	// match: (EqB (ConstBool [1]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstBool {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEqInter lowers interface equality to pointer
// equality of the two itab words. The single rule is unconditional, so
// this function always rewrites and always returns true (hence no
// trailing `return false`).
func rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqInter x y)
	// cond:
	// result: (EqPtr (ITab x) (ITab y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValuegeneric_OpEqPtr simplifies a pointer comparison against
// the nil constant (in either operand position) into the negation of a
// nil check: (EqPtr p ConstNil) -> (Not (IsNonNil p)).
// Reports whether v was rewritten.
func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr p (ConstNil))
	// cond:
	// result: (Not (IsNonNil p))
	for {
		p := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstNil {
			break
		}
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	// match: (EqPtr (ConstNil) p)
	// cond:
	// result: (Not (IsNonNil p))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstNil {
			break
		}
		p := v.Args[1]
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpEqSlice lowers slice comparison to pointer
// equality of the two data pointers (slices compare only against nil,
// so comparing the SlicePtr words suffices — presumably guaranteed by
// the frontend; confirm in gen/generic.rules). Unconditional: always
// rewrites and returns true.
func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqSlice x y)
	// cond:
	// result: (EqPtr (SlicePtr x) (SlicePtr y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValuegeneric_OpGeq16 constant-folds a signed 16-bit >= of two
// Const16 operands into a ConstBool. The AuxInt payloads are compared
// directly as int64; this is correct assuming AuxInt stores each
// constant sign-extended to 64 bits (the invariant the unsigned
// variants below re-narrow away with uintN casts) — TODO confirm
// against the AuxInt conventions in the ssa package.
func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq16U constant-folds an unsigned 16-bit >=;
// the uint16 casts reinterpret the stored AuxInt bits as unsigned.
func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) >= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) >= uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32 constant-folds a signed 32-bit >=.
func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq32U constant-folds an unsigned 32-bit >=.
func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) >= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) >= uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64 constant-folds a signed 64-bit >=.
func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq64U constant-folds an unsigned 64-bit >=.
func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) >= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) >= uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8 constant-folds a signed 8-bit >=.
func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c >= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGeq8U constant-folds an unsigned 8-bit >=.
func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) >= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) >= uint8(d))
		return true
	}
	return false
}
// rewriteValuegeneric_OpGreater16 constant-folds a signed 16-bit > of
// two Const16 operands into a ConstBool (same stamped shape as the Geq
// family; only the comparison operator differs).
func rewriteValuegeneric_OpGreater16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16U constant-folds an unsigned 16-bit >.
func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) > uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32 constant-folds a signed 32-bit >.
func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32U constant-folds an unsigned 32-bit >.
func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) > uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64 constant-folds a signed 64-bit >.
func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64U constant-folds an unsigned 64-bit >.
func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) > uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8 constant-folds a signed 8-bit >.
func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8U constant-folds an unsigned 8-bit >.
func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) > uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) > uint8(d))
		return true
	}
	return false
}
// rewriteValuegeneric_OpIMake unwraps single-element aggregate wrappers
// around an interface's data word: an IMake whose value operand is a
// one-field StructMake1 or one-element ArrayMake1 can use the inner
// value directly. Reports whether v was rewritten.
func rewriteValuegeneric_OpIMake(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IMake typ (StructMake1 val))
	// cond:
	// result: (IMake typ val)
	for {
		typ := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		val := v_1.Args[0]
		v.reset(OpIMake)
		v.AddArg(typ)
		v.AddArg(val)
		return true
	}
	// match: (IMake typ (ArrayMake1 val))
	// cond:
	// result: (IMake typ val)
	for {
		typ := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpArrayMake1 {
			break
		}
		val := v_1.Args[0]
		v.reset(OpIMake)
		v.AddArg(typ)
		v.AddArg(val)
		return true
	}
	return false
}
// rewriteValuegeneric_OpIsInBounds statically resolves index bounds
// checks (0 <= idx < bound) where the generic rules can prove the
// answer: zero-extended indices against a large-enough constant bound,
// constant-masked indices, fully constant operands, x checked against
// itself, and unsigned-mod results checked against their modulus.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		// A zero-extended 8-bit value is in [0, 255], so any bound >= 256 always passes.
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		// x < x is never true, so indexing a value by itself always fails the check.
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		// Only the (Const32, _) operand order is matched here; the commuted
		// form is presumably normalized by other rules — confirm in generic.rules.
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		// Masking with a non-negative constant c bounds the index to [0, c].
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Mod32u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod32u {
			break
		}
		// An unsigned remainder is always in [0, y), so (_ % y) is in bounds for y.
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Mod64u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod64u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValuegeneric_OpIsSliceInBounds statically resolves slicing
// bounds checks (0 <= idx <= bound). It mirrors OpIsInBounds but with
// an inclusive upper bound, and adds: a constant-zero low index is
// always in bounds, and s[:len(s):cap(s)]-style checks
// (SliceLen x vs SliceCap x) always pass since len <= cap.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		// x <= x always holds; the 0 <= x side is presumably guaranteed by
		// how slice bounds are generated — confirm in gen/generic.rules.
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		// Masking with non-negative c bounds the index to [0, c]; c <= d suffices here.
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [0]) _)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const64 [0]) _)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceLen {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSliceCap {
			break
		}
		// Both len and cap must come from the same slice value x.
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValuegeneric_OpLeq16 constant-folds a signed 16-bit <= of two
// Const16 operands into a ConstBool (same stamped shape as the
// Geq/Greater families; only the operator differs).
func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq16U constant-folds an unsigned 16-bit <=.
func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) <= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) <= uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq32 constant-folds a signed 32-bit <=.
func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq32U constant-folds an unsigned 32-bit <=.
func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) <= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) <= uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq64 constant-folds a signed 64-bit <=.
func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq64U constant-folds an unsigned 64-bit <=.
func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) <= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) <= uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq8 constant-folds a signed 8-bit <=.
func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq8U constant-folds an unsigned 8-bit <=.
func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) <= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) <= uint8(d))
		return true
	}
	return false
}
// rewriteValuegeneric_OpLess16 constant-folds a signed 16-bit < of two
// Const16 operands into a ConstBool (same stamped shape as the
// Geq/Greater/Leq families; only the operator differs).
func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess16U constant-folds an unsigned 16-bit <.
func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) < uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) < uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess32 constant-folds a signed 32-bit <.
func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess32U constant-folds an unsigned 32-bit <.
func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) < uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) < uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess64 constant-folds a signed 64-bit <.
func rewriteValuegeneric_OpLess64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess64U constant-folds an unsigned 64-bit <.
func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) < uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) < uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8 constant-folds a signed 8-bit <.
func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8U constant-folds an unsigned 8-bit <.
func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) < uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) < uint8(d))
		return true
	}
	return false
}
// rewriteValuegeneric_OpLoad simplifies Load values:
//   - store-to-load forwarding: a load from the exact address just
//     stored, with matching type and width, yields the stored value;
//   - SSA decomposition: loads of SSA-able structs with 0-4 fields
//     become StructMakeN of per-field loads (field addresses built with
//     OffPtr at each field offset), and loads of 0/1-element arrays
//     become ArrayMake0/ArrayMake1.
// Reports whether v was rewritten.
func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t1> p1 (Store [w] p2 x _))
	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
	// result: x
	for {
		t1 := v.Type
		p1 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStore {
			break
		}
		w := v_1.AuxInt // store width in bytes (Store's AuxInt)
		p2 := v_1.Args[0]
		x := v_1.Args[1]
		// Forward only on provably-identical pointers, identical types, and a
		// full-width store; a partial or differently-typed store can't be forwarded.
		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0) // empty struct: no memory access needed at all
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Load <t.FieldType(0)> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		// Field 0 is at offset 0, so it reuses ptr directly (no OffPtr).
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1) // byte offset of field 1 within the struct
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3))
		v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v6.AuxInt = t.FieldOff(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v.AddArg(v5)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsArray() && t.NumElem() == 0
	// result: (ArrayMake0)
	for {
		t := v.Type
		if !(t.IsArray() && t.NumElem() == 0) {
			break
		}
		v.reset(OpArrayMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)
	// result: (ArrayMake1 (Load <t.ElemType()> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsArray() && t.NumElem() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpArrayMake1)
		v0 := b.NewValue0(v.Line, OpLoad, t.ElemType())
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpLsh16x16 handles 16-bit left shifts with a
// 16-bit shift count: a constant count is normalized (zero-extended)
// to a Const64 so later rules need only consider 64-bit shift counts,
// and shifting a constant zero yields zero. Reports whether v was
// rewritten.
func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x (Const16 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c)) // zero-extend: shift counts are unsigned
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16) // 0 << anything == 0
		v.AuxInt = 0
		return true
	}
	return false
}
| func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh16x32 <t> x (Const32 [c])) |
| // cond: |
| // result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))])) |
| for { |
| t := v.Type |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst32 { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpLsh16x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, t) |
| v0.AuxInt = int64(uint32(c)) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Lsh16x32 (Const16 [0]) _) |
| // cond: |
| // result: (Const16 [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst16 { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| v.reset(OpConst16) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) |
| // cond: |
| // result: (Const16 [int64(int16(c) << uint64(d))]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst16 { |
| break |
| } |
| c := v_0.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| d := v_1.AuxInt |
| v.reset(OpConst16) |
| v.AuxInt = int64(int16(c) << uint64(d)) |
| return true |
| } |
| // match: (Lsh16x64 x (Const64 [0])) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
| // match: (Lsh16x64 (Const16 [0]) _) |
| // cond: |
| // result: (Const16 [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst16 { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| v.reset(OpConst16) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (Lsh16x64 _ (Const64 [c])) |
| // cond: uint64(c) >= 16 |
| // result: (Const16 [0]) |
| for { |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| c := v_1.AuxInt |
| if !(uint64(c) >= 16) { |
| break |
| } |
| v.reset(OpConst16) |
| v.AuxInt = 0 |
| return true |
| } |
| // match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d])) |
| // cond: !uaddOvf(c,d) |
| // result: (Lsh16x64 x (Const64 <t> [c+d])) |
| for { |
| t := v.Type |
| v_0 := v.Args[0] |
| if v_0.Op != OpLsh16x64 { |
| break |
| } |
| x := v_0.Args[0] |
| v_0_1 := v_0.Args[1] |
| if v_0_1.Op != OpConst64 { |
| break |
| } |
| c := v_0_1.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| d := v_1.AuxInt |
| if !(!uaddOvf(c, d)) { |
| break |
| } |
| v.reset(OpLsh16x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, t) |
| v0.AuxInt = c + d |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) |
| // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) |
| // result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpRsh16Ux64 { |
| break |
| } |
| v_0_0 := v_0.Args[0] |
| if v_0_0.Op != OpLsh16x64 { |
| break |
| } |
| x := v_0_0.Args[0] |
| v_0_0_1 := v_0_0.Args[1] |
| if v_0_0_1.Op != OpConst64 { |
| break |
| } |
| c1 := v_0_0_1.AuxInt |
| v_0_1 := v_0.Args[1] |
| if v_0_1.Op != OpConst64 { |
| break |
| } |
| c2 := v_0_1.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| c3 := v_1.AuxInt |
| if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { |
| break |
| } |
| v.reset(OpLsh16x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) |
| v0.AuxInt = c1 - c2 + c3 |
| v.AddArg(v0) |
| return true |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh16x8 <t> x (Const8 [c])) |
| // cond: |
| // result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))])) |
| for { |
| t := v.Type |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst8 { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpLsh16x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, t) |
| v0.AuxInt = int64(uint8(c)) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Lsh16x8 (Const16 [0]) _) |
| // cond: |
| // result: (Const16 [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst16 { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| v.reset(OpConst16) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh32x16 <t> x (Const16 [c])) |
| // cond: |
| // result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))])) |
| for { |
| t := v.Type |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst16 { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpLsh32x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, t) |
| v0.AuxInt = int64(uint16(c)) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Lsh32x16 (Const32 [0]) _) |
| // cond: |
| // result: (Const32 [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst32 { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| v.reset(OpConst32) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh32x32 <t> x (Const32 [c])) |
| // cond: |
| // result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))])) |
| for { |
| t := v.Type |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst32 { |
| break |
| } |
| c := v_1.AuxInt |
| v.reset(OpLsh32x64) |
| v.AddArg(x) |
| v0 := b.NewValue0(v.Line, OpConst64, t) |
| v0.AuxInt = int64(uint32(c)) |
| v.AddArg(v0) |
| return true |
| } |
| // match: (Lsh32x32 (Const32 [0]) _) |
| // cond: |
| // result: (Const32 [0]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst32 { |
| break |
| } |
| if v_0.AuxInt != 0 { |
| break |
| } |
| v.reset(OpConst32) |
| v.AuxInt = 0 |
| return true |
| } |
| return false |
| } |
| func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { |
| b := v.Block |
| _ = b |
| // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) |
| // cond: |
| // result: (Const32 [int64(int32(c) << uint64(d))]) |
| for { |
| v_0 := v.Args[0] |
| if v_0.Op != OpConst32 { |
| break |
| } |
| c := v_0.AuxInt |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| d := v_1.AuxInt |
| v.reset(OpConst32) |
| v.AuxInt = int64(int32(c) << uint64(d)) |
| return true |
| } |
| // match: (Lsh32x64 x (Const64 [0])) |
| // cond: |
| // result: x |
| for { |
| x := v.Args[0] |
| v_1 := v.Args[1] |
| if v_1.Op != OpConst64 { |
| break |
| } |
| if v_1.AuxInt != 0 { |
| break |
| } |
| v.reset(OpCopy) |
| v.Type = x.Type |
| v.AddArg(x) |
| return true |
| } |
|