regexp: port RE2's bitstate backtracker to the regexp package

This is a port of RE2's bitstate backtracker, which is triggered
under the same conditions as in RE2. However, I was not sure how to
port over some of the optimizations in the RE2 backtracker, and there
is a ~2% penalty on benchmarks that don't trigger the backtracker.
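
For reference, a rough sketch of when the backtracker is chosen, based
on the constants in backtrack.go below (hypothetical shape; the actual
dispatch lives in exec.go and is not part of this diff):

	// Hypothetical sketch, not the real exec.go code: use the
	// backtracker only when the compiled prog is small enough and
	// the visited bit vector for this input fits in the budget.
	if b := newBitState(prog); b != notBacktrack && end-pos <= maxBitStateLen(prog) {
		return m.backtrack(i, pos, end, ncap)
	}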

benchmark                                 old ns/op      new ns/op      delta
BenchmarkLiteral                          312            189            -39.42%
BenchmarkNotLiteral                       4435           3001           -32.33%
BenchmarkMatchClass                       5758           4378           -23.97%
BenchmarkMatchClass_InRange               5385           4084           -24.16%
BenchmarkReplaceAll                       5291           3505           -33.76%
BenchmarkAnchoredLiteralShortNonMatch     190            200            +5.26%
BenchmarkAnchoredLiteralLongNonMatch      189            194            +2.65%
BenchmarkAnchoredShortMatch               479            304            -36.53%
BenchmarkAnchoredLongMatch                478            499            +4.39%
BenchmarkOnePassShortA                    791            798            +0.88%
BenchmarkNotOnePassShortA                 3202           1571           -50.94%
BenchmarkOnePassShortB                    614            633            +3.09%
BenchmarkNotOnePassShortB                 2685           881            -67.19%
BenchmarkOnePassLongPrefix                152            154            +1.32%
BenchmarkOnePassLongNotPrefix             505            533            +5.54%
BenchmarkMatchEasy0_32                    139            171            +23.02%
BenchmarkMatchEasy0_1K                    653            1797           +175.19%
BenchmarkMatchEasy0_32K                   12032          13346          +10.92%
BenchmarkMatchEasy0_1M                    462882         461272         -0.35%
BenchmarkMatchEasy0_32M                   15015339       15365238       +2.33%
BenchmarkMatchEasy1_32                    122            168            +37.70%
BenchmarkMatchEasy1_1K                    3339           2612           -21.77%
BenchmarkMatchEasy1_32K                   72330          71721          -0.84%
BenchmarkMatchEasy1_1M                    2545410        2652284        +4.20%
BenchmarkMatchEasy1_32M                   80072063       82609750       +3.17%
BenchmarkMatchMedium_32                   2359           1980           -16.07%
BenchmarkMatchMedium_1K                   75939          58593          -22.84%
BenchmarkMatchMedium_32K                  2450907        2501106        +2.05%
BenchmarkMatchMedium_1M                   78707697       80174418       +1.86%
BenchmarkMatchMedium_32M                  2535146010     2570896441     +1.41%
BenchmarkMatchHard_32                     4297           2960           -31.11%
BenchmarkMatchHard_1K                     133592         88997          -33.38%
BenchmarkMatchHard_32K                    4240445        4336907        +2.27%
BenchmarkMatchHard_1M                     136187006      139350238      +2.32%
BenchmarkMatchHard_32M                    4350855890     4478537306     +2.93%

benchmark                    old MB/s     new MB/s     speedup
BenchmarkMatchEasy0_32       228.74       186.11       0.81x
BenchmarkMatchEasy0_1K       1565.91      569.64       0.36x
BenchmarkMatchEasy0_32K      2723.31      2455.10      0.90x
BenchmarkMatchEasy0_1M       2265.32      2273.22      1.00x
BenchmarkMatchEasy0_32M      2234.68      2183.79      0.98x
BenchmarkMatchEasy1_32       261.08       190.22       0.73x
BenchmarkMatchEasy1_1K       306.59       391.91       1.28x
BenchmarkMatchEasy1_32K      453.03       456.88       1.01x
BenchmarkMatchEasy1_1M       411.95       395.35       0.96x
BenchmarkMatchEasy1_32M      419.05       406.18       0.97x
BenchmarkMatchMedium_32      13.56        16.16        1.19x
BenchmarkMatchMedium_1K      13.48        17.48        1.30x
BenchmarkMatchMedium_32K     13.37        13.10        0.98x
BenchmarkMatchMedium_1M      13.32        13.08        0.98x
BenchmarkMatchMedium_32M     13.24        13.05        0.99x
BenchmarkMatchHard_32        7.45         10.81        1.45x
BenchmarkMatchHard_1K        7.67         11.51        1.50x
BenchmarkMatchHard_32K       7.73         7.56         0.98x
BenchmarkMatchHard_1M        7.70         7.52         0.98x
BenchmarkMatchHard_32M       7.71         7.49         0.97x

Fixes #4154

Change-Id: Iff7fb9507f0872b320d08afc08679751ed1b28bc
Reviewed-on: https://go-review.googlesource.com/2153
Reviewed-by: Russ Cox <rsc@golang.org>
diff --git a/src/regexp/backtrack.go b/src/regexp/backtrack.go
new file mode 100644
index 0000000..824014b
--- /dev/null
+++ b/src/regexp/backtrack.go
@@ -0,0 +1,357 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// backtrack is a regular expression search with submatch
+// tracking for small regular expressions and texts. It allocates
+// a bit vector with (length of input) * (length of prog) bits,
+// to make sure it never explores the same (character position, instruction)
+// state multiple times. This limits the search to run in time linear in
+// the length of the text.
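+// For example, a prog with 25 instructions searching 1000 bytes of
+// input allocates a visited vector of 25 * 1001 bits, about 3 kB.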
+//
+// backtrack is a fast replacement for the NFA code on small
+// regexps when onepass cannot be used.
+
+package regexp
+
+import "regexp/syntax"
+
+// A job is an entry on the backtracker's job stack. It holds
+// the instruction pc and the position in the input.
+type job struct {
+	pc  uint32
+	arg int
+	pos int
+}
+
+const (
+	visitedBits        = 32
+	maxBacktrackProg   = 500        // len(prog.Inst) <= max
+	maxBacktrackVector = 256 * 1024 // bit vector size <= max (bits)
+)
+
+// bitState holds state for the backtracker.
+type bitState struct {
+	prog *syntax.Prog
+
+	end     int
+	cap     []int
+	reqcap  bool // whether any captures are requested
+	input   input
+	jobs    []job
+	visited []uint32
+}
+
+var notBacktrack *bitState
+
+// maxBitStateLen returns the maximum length of a string to search with
+// the backtracker using prog.
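+// For the largest allowed prog (maxBacktrackProg = 500 instructions),
+// this allows at most 256*1024/500 = 524 bytes of input.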
+func maxBitStateLen(prog *syntax.Prog) int {
+	return maxBacktrackVector / len(prog.Inst)
+}
+
+// newBitState returns a new bitState for the given prog,
+// or notBacktrack if the prog exceeds the maximum size for which
+// the backtracker will be run.
+func newBitState(prog *syntax.Prog) *bitState {
+	if len(prog.Inst) > maxBacktrackProg {
+		return notBacktrack
+	}
+	return &bitState{
+		prog: prog,
+	}
+}
+
+// reset resets the state of the backtracker.
+// end is the end position in the input. ncap and reqcap are the number
+// of the machine's capture registers and the number of user-requested
+// captures, respectively.
+func (b *bitState) reset(end int, ncap int, reqcap int) {
+	b.end = end
+	b.reqcap = reqcap > 0
+
+	if cap(b.jobs) == 0 {
+		b.jobs = make([]job, 0, 256)
+	} else {
+		b.jobs = b.jobs[:0]
+	}
+
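+	// Size visited to hold one bit per (instruction, position) pair,
+	// rounded up to whole uint32 words.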
+	visitedSize := (len(b.prog.Inst)*(end+1) + visitedBits - 1) / visitedBits
+	if cap(b.visited) < visitedSize {
+		b.visited = make([]uint32, visitedSize, maxBacktrackVector/visitedBits)
+	} else {
+		b.visited = b.visited[:visitedSize]
+		for i := range b.visited {
+			b.visited[i] = 0
+		}
+	}
+
+	if len(b.cap) < ncap {
+		b.cap = make([]int, ncap)
+	}
+	for i := range b.cap {
+		b.cap[i] = -1
+	}
+}
+
+// shouldVisit reports whether the (pc, pos) pair has not yet been
+// visited, and marks it as visited.
+func (b *bitState) shouldVisit(pc uint32, pos int) bool {
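+	// Each (pc, pos) pair maps to a distinct bit: pos ranges over
+	// 0..end inclusive, so consecutive pcs are end+1 bits apart.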
+	n := uint(int(pc)*(b.end+1) + pos)
+	if b.visited[n/visitedBits]&(1<<(n&(visitedBits-1))) != 0 {
+		return false
+	}
+	b.visited[n/visitedBits] |= 1 << (n & (visitedBits - 1))
+	return true
+}
+
+// push pushes (pc, pos, arg) onto the job stack if it should be
+// visited.
+func (b *bitState) push(pc uint32, pos int, arg int) {
+	if b.prog.Inst[pc].Op == syntax.InstFail {
+		return
+	}
+
+	// Only check shouldVisit when arg == 0.
+	// When arg > 0, we are continuing a previous visit.
+	if arg == 0 && !b.shouldVisit(pc, pos) {
+		return
+	}
+
+	b.jobs = append(b.jobs, job{pc: pc, arg: arg, pos: pos})
+}
+
+// tryBacktrack runs a backtracking search starting at pos.
+func (m *machine) tryBacktrack(b *bitState, i input, pc uint32, pos int) bool {
+	longest := m.re.longest
+	m.matched = false
+
+	b.push(pc, pos, 0)
+	for len(b.jobs) > 0 {
+		l := len(b.jobs) - 1
+		// Pop job off the stack.
+		pc := b.jobs[l].pc
+		pos := b.jobs[l].pos
+		arg := b.jobs[l].arg
+		b.jobs = b.jobs[:l]
+
+		// Optimization: rather than push and pop,
+		// code that is going to push and continue
+		// the loop simply updates pc, pos, and arg
+		// and jumps to CheckAndLoop. We have to
+		// do the shouldVisit check that push
+		// would have done, but we avoid the stack
+		// manipulation.
+		goto Skip
+	CheckAndLoop:
+		if !b.shouldVisit(pc, pos) {
+			continue
+		}
+	Skip:
+
+		inst := b.prog.Inst[pc]
+
+		switch inst.Op {
+		default:
+			panic("bad inst")
+		case syntax.InstFail:
+			panic("unexpected InstFail")
+		case syntax.InstAlt:
+			// Cannot just
+			//   b.push(inst.Out, pos, 0)
+			//   b.push(inst.Arg, pos, 0)
+			// If during the processing of inst.Out, we encounter
+			// inst.Arg via another path, we want to process it then.
+			// Pushing it here will inhibit that. Instead, re-push
+			// inst with arg==1 as a reminder to push inst.Arg out
+			// later.
+			switch arg {
+			case 0:
+				b.push(pc, pos, 1)
+				pc = inst.Out
+				goto CheckAndLoop
+			case 1:
+				// Finished inst.Out; try inst.Arg.
+				arg = 0
+				pc = inst.Arg
+				goto CheckAndLoop
+			}
+			panic("bad arg in InstAlt")
+
+		case syntax.InstAltMatch:
+			// One opcode consumes runes; the other leads to match.
+			switch b.prog.Inst[inst.Out].Op {
+			case syntax.InstRune, syntax.InstRune1, syntax.InstRuneAny, syntax.InstRuneAnyNotNL:
+				// inst.Arg is the match.
+				b.push(inst.Arg, pos, 0)
+				pc = inst.Arg
+				pos = b.end
+				goto CheckAndLoop
+			}
+			// inst.Out is the match - non-greedy
+			b.push(inst.Out, b.end, 0)
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstRune:
+			r, width := i.step(pos)
+			if !inst.MatchRune(r) {
+				continue
+			}
+			pos += width
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstRune1:
+			r, width := i.step(pos)
+			if r != inst.Rune[0] {
+				continue
+			}
+			pos += width
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstRuneAnyNotNL:
+			r, width := i.step(pos)
+			if r == '\n' || r == endOfText {
+				continue
+			}
+			pos += width
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstRuneAny:
+			r, width := i.step(pos)
+			if r == endOfText {
+				continue
+			}
+			pos += width
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstCapture:
+			switch arg {
+			case 0:
+				if inst.Arg < uint32(len(b.cap)) {
+					// Capture pos to register, but save old value.
+					b.push(pc, b.cap[inst.Arg], 1) // come back when we're done.
+					b.cap[inst.Arg] = pos
+				}
+				pc = inst.Out
+				goto CheckAndLoop
+			case 1:
+				// Finished inst.Out; restore the old value.
+				b.cap[inst.Arg] = pos
+				continue
+			}
+			panic("bad arg in InstCapture")
+
+		case syntax.InstEmptyWidth:
+			if syntax.EmptyOp(inst.Arg)&^i.context(pos) != 0 {
+				continue
+			}
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstNop:
+			pc = inst.Out
+			goto CheckAndLoop
+
+		case syntax.InstMatch:
+			// We found a match. If the caller doesn't care
+			// where the match is, no point going further.
+			if !b.reqcap {
+				m.matched = true
+				return m.matched
+			}
+
+			// Record best match so far.
+			// Only need to check end point, because this entire
+			// call is only considering one start position.
+			b.cap[1] = pos
+			if !m.matched || (longest && pos > 0 && pos > m.matchcap[1]) {
+				copy(m.matchcap, b.cap)
+			}
+			m.matched = true
+
+			// If going for first match, we're done.
+			if !longest {
+				return m.matched
+			}
+
+			// If we used the entire text, a longer match is not possible.
+			if pos == b.end {
+				return m.matched
+			}
+
+			// Otherwise, continue on in hope of a longer match.
+			continue
+		}
+		panic("unreachable")
+	}
+
+	return m.matched
+}
+
+// backtrack runs a backtracking search of prog on the input starting at pos.
+func (m *machine) backtrack(i input, pos int, end int, reqcap int) bool {
+	if !i.canCheckPrefix() {
+		panic("backtrack called for a RuneReader")
+	}
+
+	startCond := m.re.cond
+	if startCond == ^syntax.EmptyOp(0) { // impossible
+		return false
+	}
+	if startCond&syntax.EmptyBeginText != 0 && pos != 0 {
+		// Anchored match, past beginning of text.
+		return false
+	}
+
+	b := m.b
+	b.reset(end, len(m.matchcap), reqcap)
+
+	for i := range m.matchcap {
+		m.matchcap[i] = -1
+	}
+
+	// Anchored search must start at the beginning of the input.
+	if startCond&syntax.EmptyBeginText != 0 {
+		b.cap[0] = pos
+		return m.tryBacktrack(b, i, uint32(m.p.Start), pos)
+	}
+
+	// Unanchored search, starting from each possible text position.
+	// Notice that we have to try the empty string at the end of
+	// the text, so the loop condition is pos <= end, not pos < end.
+	// This looks like it's quadratic in the size of the text,
+	// but we are not clearing visited between calls to tryBacktrack,
+	// so no work is duplicated and it ends up still being linear.
+	width := -1
+	for ; pos <= end && width != 0; pos += width {
+		if len(m.re.prefix) > 0 {
+			// Match requires literal prefix; fast search for it.
+			advance := i.index(m.re, pos)
+			if advance < 0 {
+				return false
+			}
+			pos += advance
+		}
+
+		b.cap[0] = pos
+		if m.tryBacktrack(b, i, uint32(m.p.Start), pos) {
+			// Match must be leftmost; done.
+			return true
+		}
+		_, width = i.step(pos)
+	}
+	return false
+}