sweet: add a short mode for testing
Right now the full benchmark suite takes quite a while to run once:
up to 20 minutes. This change introduces a short mode that reduces the
running time to about 4 minutes and 30 seconds. With some judicious sharding,
that can likely be brought down further.
Change-Id: Idf2e6724347bc86450bff904bde7161312bd0cc3
Reviewed-on: https://go-review.googlesource.com/c/benchmarks/+/378275
Reviewed-by: Michael Pratt <mpratt@google.com>
Trust: Michael Knyszek <mknyszek@google.com>
Run-TryBot: Michael Knyszek <mknyszek@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
diff --git a/sweet/benchmarks/bleve-query/main.go b/sweet/benchmarks/bleve-query/main.go
index 110b5f3..a616049 100644
--- a/sweet/benchmarks/bleve-query/main.go
+++ b/sweet/benchmarks/bleve-query/main.go
@@ -18,6 +18,12 @@
_ "github.com/blevesearch/bleve/analysis/analyzer/keyword"
)
+var iterations int
+
+func init() {
+ flag.IntVar(&iterations, "iterations", 50, "number of times to iterate over the list of query terms")
+}
+
func parseFlags() error {
flag.Parse()
if flag.NArg() != 1 {
@@ -26,13 +32,13 @@
return nil
}
-func run(idxdir string) error {
+func run(idxdir string, iterations int) error {
index, err := bleve.Open(idxdir)
if err != nil {
return err
}
return driver.RunBenchmark("BleveQuery", func(_ *driver.B) error {
- for j := 0; j < 50; j++ {
+ for j := 0; j < iterations; j++ {
for _, term := range terms {
query := bleve.NewTermQuery(term)
query.SetField("Text")
@@ -52,7 +58,7 @@
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
- if err := run(flag.Arg(0)); err != nil {
+ if err := run(flag.Arg(0), iterations); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
diff --git a/sweet/benchmarks/fogleman-fauxgl/main.go b/sweet/benchmarks/fogleman-fauxgl/main.go
index 4b969e5..012d924 100644
--- a/sweet/benchmarks/fogleman-fauxgl/main.go
+++ b/sweet/benchmarks/fogleman-fauxgl/main.go
@@ -17,6 +17,12 @@
var im image.Image
+var imagesPerRotation int
+
+func init() {
+ flag.IntVar(&imagesPerRotation, "images-per-rotation", 72, "number of images per rotation to generate")
+}
+
func main() {
driver.SetFlags(flag.CommandLine)
flag.Parse()
@@ -25,6 +31,7 @@
fmt.Fprintln(os.Stderr, "expected input STL file")
os.Exit(1)
}
+ inc := 360 / imagesPerRotation
// Load mesh into animation structure.
anim, err := animatebench.Load(flag.Arg(0))
@@ -35,7 +42,7 @@
err = driver.RunBenchmark("FoglemanFauxGLRenderRotateBoat", func(b *driver.B) error {
runtime.GC()
b.ResetTimer()
- for i := 0; i < 360; i += 5 {
+ for i := 0; i < 360; i += inc {
im = anim.RenderNext()
}
return nil
diff --git a/sweet/benchmarks/gopher-lua/main.go b/sweet/benchmarks/gopher-lua/main.go
index 392bbb4..88c50c7 100644
--- a/sweet/benchmarks/gopher-lua/main.go
+++ b/sweet/benchmarks/gopher-lua/main.go
@@ -19,6 +19,12 @@
lua "github.com/yuin/gopher-lua"
)
+var short bool
+
+func init() {
+ flag.BoolVar(&short, "short", false, "whether to run a short version of this benchmark")
+}
+
func parseFlags() error {
flag.Parse()
if flag.NArg() != 2 {
@@ -61,6 +67,9 @@
if err := s.CallByParam(freq, input, lua.LNumber(2)); err != nil {
return err
}
+ if short {
+ return nil
+ }
if err := s.CallByParam(count, input, lua.LString("GGT")); err != nil {
return err
}
diff --git a/sweet/benchmarks/gvisor/main.go b/sweet/benchmarks/gvisor/main.go
index 0a21508..704fec9 100644
--- a/sweet/benchmarks/gvisor/main.go
+++ b/sweet/benchmarks/gvisor/main.go
@@ -23,6 +23,7 @@
runscPath string
assetsDir string
tmpDir string
+ short bool
}
var cliCfg config
@@ -32,6 +33,7 @@
flag.StringVar(&cliCfg.runscPath, "runsc", "", "path to the runsc binary")
flag.StringVar(&cliCfg.assetsDir, "assets-dir", "", "path to the directory containing benchmark root filesystems")
flag.StringVar(&cliCfg.tmpDir, "tmp", "", "path to a temporary working directory")
+ flag.BoolVar(&cliCfg.short, "short", false, "whether to run a short version of the benchmarks")
}
type benchmark interface {
@@ -39,14 +41,20 @@
run(*config, io.Writer) error
}
-// List of all benchmarks.
-var benchmarks = []benchmark{
- startup{},
- systemCall{500000},
- httpServer{20 * time.Second},
-}
-
func main1() error {
+ benchmarks := []benchmark{
+ startup{},
+ systemCall{500000},
+ httpServer{20 * time.Second},
+ }
+ if cliCfg.short {
+ benchmarks = []benchmark{
+ startup{},
+ systemCall{500},
+ httpServer{1 * time.Second},
+ }
+ }
+
// Run each benchmark once.
for _, bench := range benchmarks {
// Run the benchmark command under runsc.
diff --git a/sweet/benchmarks/internal/driver/driver.go b/sweet/benchmarks/internal/driver/driver.go
index c72093b..015691a 100644
--- a/sweet/benchmarks/internal/driver/driver.go
+++ b/sweet/benchmarks/internal/driver/driver.go
@@ -28,6 +28,7 @@
memProfileDir string
perfDir string
perfFlags string
+ short bool
)
func SetFlags(f *flag.FlagSet) {
diff --git a/sweet/benchmarks/tile38/main.go b/sweet/benchmarks/tile38/main.go
index dc92162..f631724 100644
--- a/sweet/benchmarks/tile38/main.go
+++ b/sweet/benchmarks/tile38/main.go
@@ -30,12 +30,12 @@
host string
port int
seed int64
- iter int
serverBin string
dataPath string
tmpDir string
serverProcs int
isProfiling bool
+ short bool
}
func (c *config) profilePath(typ driver.ProfileType) string {
@@ -60,10 +60,10 @@
flag.StringVar(&cliCfg.host, "host", "", "hostname of tile38 server")
flag.IntVar(&cliCfg.port, "port", 9851, "port for tile38 server")
flag.Int64Var(&cliCfg.seed, "seed", 0, "seed for PRNG")
- flag.IntVar(&cliCfg.iter, "iter", 60*50000, "how many iterations to run (for profiling)")
flag.StringVar(&cliCfg.serverBin, "server", "", "path to tile38 server binary")
flag.StringVar(&cliCfg.dataPath, "data", "", "path to tile38 server data")
flag.StringVar(&cliCfg.tmpDir, "tmp", "", "path to temporary directory")
+ flag.BoolVar(&cliCfg.short, "short", false, "whether to run a short version of this benchmark")
// Grab the number of procs we have and give ourselves only 1/4 of those.
procs := runtime.GOMAXPROCS(-1)
@@ -313,8 +313,12 @@
driver.BenchmarkPID(srvCmd.Process.Pid),
driver.DoPerf(true),
}
+ iters := 60 * 50000
+ if cfg.short {
+ iters = 1000
+ }
return driver.RunBenchmark(bench.name(), func(d *driver.B) error {
- return bench.run(d, cfg.host, cfg.port, cfg.serverProcs, cfg.iter)
+ return bench.run(d, cfg.host, cfg.port, cfg.serverProcs, iters)
}, opts...)
}
@@ -327,6 +331,10 @@
for _, typ := range driver.ProfileTypes {
cliCfg.isProfiling = cliCfg.isProfiling || driver.ProfilingEnabled(typ)
}
+ benchmarks := benchmarks
+ if cliCfg.short {
+ benchmarks = benchmarks[:1]
+ }
for _, bench := range benchmarks {
if err := runOne(bench, &cliCfg); err != nil {
fmt.Fprintf(os.Stderr, "error: %v\n", err)
diff --git a/sweet/cmd/sweet/benchmark.go b/sweet/cmd/sweet/benchmark.go
index 1779dd2..50bc6c5 100644
--- a/sweet/cmd/sweet/benchmark.go
+++ b/sweet/cmd/sweet/benchmark.go
@@ -209,6 +209,7 @@
BinDir: binDir,
SrcDir: srcDir,
BenchDir: benchDir,
+ Short: r.short,
}
if err := b.harness.Build(cfg, &bcfg); err != nil {
return fmt.Errorf("build %s for %s: %v", b.name, cfg.Name, err)
@@ -261,6 +262,7 @@
AssetsDir: assetsDir,
Args: args,
Results: results,
+ Short: r.short,
})
}
diff --git a/sweet/cmd/sweet/run.go b/sweet/cmd/sweet/run.go
index c03b3cc..4239149 100644
--- a/sweet/cmd/sweet/run.go
+++ b/sweet/cmd/sweet/run.go
@@ -50,6 +50,7 @@
memProfile bool
perf bool
perfFlags string
+ short bool
}
type runCmd struct {
@@ -124,6 +125,7 @@
f.BoolVar(&c.quiet, "quiet", false, "whether to suppress activity output on stderr (no effect on -shell)")
f.BoolVar(&c.printCmd, "shell", false, "whether to print the commands being executed to stdout")
f.BoolVar(&c.stopOnError, "stop-on-error", false, "whether to stop running benchmarks if an error occurs or a benchmark fails")
+ f.BoolVar(&c.short, "short", false, "whether to run a short version of the benchmarks for testing")
f.Var(&c.toRun, "run", "benchmark group or comma-separated list of benchmarks to run")
}
diff --git a/sweet/common/harness.go b/sweet/common/harness.go
index d5afb2a..525235a 100644
--- a/sweet/common/harness.go
+++ b/sweet/common/harness.go
@@ -20,6 +20,11 @@
// BenchDir is the path to the benchmark's source directory in the Sweet
// repository.
BenchDir string
+
+ // Short indicates whether or not to run a short version of the benchmarks
+ // for testing. Guaranteed to be the same as RunConfig.Short for any
+ // RunConfig.
+ Short bool
}
type RunConfig struct {
@@ -50,6 +55,11 @@
// Results is the file to which benchmark results should be appended
// in the Go benchmark format.
Results *os.File
+
+ // Short indicates whether or not to run a short version of the benchmarks
+ // for testing. Guaranteed to be the same as BuildConfig.Short for any
+ // BuildConfig.
+ Short bool
}
type Harness interface {
diff --git a/sweet/harnesses/go-build.go b/sweet/harnesses/go-build.go
index 16eb7ca..2e57a80 100644
--- a/sweet/harnesses/go-build.go
+++ b/sweet/harnesses/go-build.go
@@ -72,7 +72,12 @@
}
func (h GoBuild) Build(cfg *common.Config, bcfg *common.BuildConfig) error {
- for _, bench := range buildBenchmarks {
+ benchmarks := buildBenchmarks
+ if bcfg.Short {
+ // Do only the pkgsite benchmark.
+ benchmarks = []*buildBenchmark{buildBenchmarks[2]}
+ }
+ for _, bench := range benchmarks {
// Generate a symlink to the repository and put it in bin.
// It's not a binary, but it's the only place we can put it
// and still access it in Run.
@@ -94,7 +99,12 @@
}
func (h GoBuild) Run(cfg *common.Config, rcfg *common.RunConfig) error {
- for _, bench := range buildBenchmarks {
+ benchmarks := buildBenchmarks
+ if rcfg.Short {
+ // Do only the pkgsite benchmark.
+ benchmarks = []*buildBenchmark{buildBenchmarks[2]}
+ }
+ for _, bench := range benchmarks {
cmd := exec.Command(
filepath.Join(rcfg.BinDir, "go-build-bench"),
append(rcfg.Args, []string{
diff --git a/sweet/harnesses/gvisor.go b/sweet/harnesses/gvisor.go
index 2b10b6f..dce4612 100644
--- a/sweet/harnesses/gvisor.go
+++ b/sweet/harnesses/gvisor.go
@@ -57,13 +57,17 @@
}
func (h GVisor) Run(cfg *common.Config, rcfg *common.RunConfig) error {
+ args := append(rcfg.Args, []string{
+ "-runsc", filepath.Join(rcfg.BinDir, "runsc"),
+ "-assets-dir", rcfg.AssetsDir,
+ "-tmp", rcfg.TmpDir,
+ }...)
+ if rcfg.Short {
+ args = append(args, "-short")
+ }
cmd := exec.Command(
filepath.Join(rcfg.BinDir, "gvisor-bench"),
- append(rcfg.Args, []string{
- "-runsc", filepath.Join(rcfg.BinDir, "runsc"),
- "-assets-dir", rcfg.AssetsDir,
- "-tmp", rcfg.TmpDir,
- }...)...,
+ args...,
)
cmd.Env = cfg.ExecEnv.Collapse()
cmd.Stdout = rcfg.Results
diff --git a/sweet/harnesses/local.go b/sweet/harnesses/local.go
index c5aa29f..169d3ad 100644
--- a/sweet/harnesses/local.go
+++ b/sweet/harnesses/local.go
@@ -79,11 +79,14 @@
return &localBenchHarness{
binName: "bleve-index-bench",
genArgs: func(cfg *common.Config, rcfg *common.RunConfig) []string {
- return []string{
+ args := []string{
"-batch-size", "100",
- "-documents", "1000",
filepath.Join(rcfg.AssetsDir, "enwiki-20080103-pages-articles.xml.bz2"),
}
+ if rcfg.Short {
+ args = append([]string{"-documents", "100"}, args...)
+ }
+ return args
},
}
}
@@ -92,9 +95,11 @@
return &localBenchHarness{
binName: "bleve-query-bench",
genArgs: func(cfg *common.Config, rcfg *common.RunConfig) []string {
- return []string{
- filepath.Join(rcfg.AssetsDir, "index"),
+ args := []string{filepath.Join(rcfg.AssetsDir, "index")}
+ if rcfg.Short {
+ args = append([]string{"-iterations", "1"}, args...)
}
+ return args
},
beforeRun: func(cfg *common.Config, rcfg *common.RunConfig) error {
// Make sure all the index passed to the benchmark is writeable.
@@ -108,9 +113,11 @@
return &localBenchHarness{
binName: "fogleman-fauxgl-bench",
genArgs: func(cfg *common.Config, rcfg *common.RunConfig) []string {
- return []string{
- filepath.Join(rcfg.AssetsDir, "3dbenchy.stl"),
+ args := []string{filepath.Join(rcfg.AssetsDir, "3dbenchy.stl")}
+ if rcfg.Short {
+ args = append([]string{"-images-per-rotation", "1"}, args...)
}
+ return args
},
noStdout: true,
}
@@ -133,10 +140,14 @@
return &localBenchHarness{
binName: "gopher-lua-bench",
genArgs: func(cfg *common.Config, rcfg *common.RunConfig) []string {
- return []string{
+ args := []string{
filepath.Join(rcfg.AssetsDir, "k-nucleotide.lua"),
filepath.Join(rcfg.AssetsDir, "input.txt"),
}
+ if rcfg.Short {
+ args = append([]string{"-short"}, args...)
+ }
+ return args
},
}
}
diff --git a/sweet/harnesses/tile38.go b/sweet/harnesses/tile38.go
index ef1692a..32fcf95 100644
--- a/sweet/harnesses/tile38.go
+++ b/sweet/harnesses/tile38.go
@@ -60,15 +60,19 @@
if err := makeWriteable(dataPath); err != nil {
return err
}
+ args := append(rcfg.Args, []string{
+ "-host", "127.0.0.1",
+ "-port", "9851",
+ "-server", filepath.Join(rcfg.BinDir, server),
+ "-data", dataPath,
+ "-tmp", rcfg.TmpDir,
+ }...)
+ if rcfg.Short {
+ args = append(args, "-short")
+ }
cmd := exec.Command(
filepath.Join(rcfg.BinDir, "tile38-bench"),
- append(rcfg.Args, []string{
- "-host", "127.0.0.1",
- "-port", "9851",
- "-server", filepath.Join(rcfg.BinDir, server),
- "-data", dataPath,
- "-tmp", rcfg.TmpDir,
- }...)...,
+ args...,
)
cmd.Env = cfg.ExecEnv.Collapse()
cmd.Stdout = rcfg.Results