Commit aa9d2cb2 authored by dvyukov

-

parent 24be1b2b
@@ -4,8 +4,12 @@ import (
 	"time"
 )
 
-func Benchmark(N int64) error {
+func main() {
+	PerfBenchmark(SleepBenchmark)
+}
+
+func SleepBenchmark(N int64) (metrics []PerfMetric, err error) {
 	time.Sleep(time.Duration(N) * time.Millisecond)
-	return nil
+	metrics = append(metrics, PerfMetric{"foo", 42})
+	return
 }
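
Note: under the new contract a benchmark is any function matching BenchFunc, that is, it receives the target iteration count N, runs the measured work, and returns optional extra metrics plus an error, while PerfBenchmark handles iteration scaling and reporting. Below is a minimal sketch of another benchmark written against that shape, standing in for the SleepBenchmark sample above; JSONBenchmark and the "encoded-bytes" metric name are illustrative and not part of this commit, while PerfMetric and PerfBenchmark are the identifiers from the diff.

package main

import (
	"bytes"
	"encoding/json"
)

// JSONBenchmark measures repeated JSON encoding of a small struct and
// reports the encoded size as an extra metric, mirroring the
// PerfMetric{"foo", 42} example above.
func JSONBenchmark(N int64) (metrics []PerfMetric, err error) {
	type payload struct {
		Name string
		Vals []int
	}
	p := payload{Name: "bench", Vals: []int{1, 2, 3, 4}}
	var buf bytes.Buffer
	// Run the measured work N times (SleepBenchmark instead sleeps
	// once for N milliseconds).
	for i := int64(0); i < N; i++ {
		buf.Reset()
		if e := json.NewEncoder(&buf).Encode(&p); e != nil {
			return nil, e
		}
	}
	metrics = append(metrics, PerfMetric{"encoded-bytes", int64(buf.Len())})
	return metrics, nil
}

func main() {
	PerfBenchmark(JSONBenchmark)
}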
@@ -3,8 +3,9 @@ package main
 import (
 	"flag"
 	"fmt"
-	"os"
+	"log"
 	"time"
+	"runtime"
 )
 
 var (
@@ -13,45 +14,68 @@ var (
 	benchMem = flag.Int("benchmem", 64, "approx RSS value to aim at in benchmarks, in MB")
 )
 
-type Result struct {
+type PerfResult struct {
 	N       int64
 	RunTime time.Duration
+	Metrics []PerfMetric
 }
 
-func main() {
-	flag.Parse()
-	var res Result
+type PerfMetric struct {
+	Type string
+	Val  int64
+}
+
+type BenchFunc func(N int64) ([]PerfMetric, error)
+
+func PerfBenchmark(f BenchFunc) {
+	if !flag.Parsed() {
+		flag.Parse()
+	}
+	var res PerfResult
 	for i := 0; i < *benchNum; i++ {
-		res1 := RunBenchmark()
+		res1 := RunBenchmark(f)
 		if res.RunTime == 0 || res.RunTime > res1.RunTime {
 			res = res1
 		}
 	}
 	fmt.Printf("GOPERF-METRIC:runtime=%v\n", int64(res.RunTime)/res.N)
+	for _, m := range res.Metrics {
+		fmt.Printf("GOPERF-METRIC:%v=%v\n", m.Type, m.Val)
+	}
 }
 
-func RunBenchmark() Result {
-	var res Result
+func RunBenchmark(f BenchFunc) PerfResult {
+	var res PerfResult
 	for ChooseN(&res) {
-		res = RunOnce(res.N)
+		log.Printf("Benchmarking %v iterations\n", res.N)
+		res = RunOnce(f, res.N)
+		log.Printf("Done: %+v\n", res)
 	}
 	return res
 }
 
-func RunOnce(N int64) Result {
-	fmt.Printf("Benchmarking %v iterations\n", N)
+func RunOnce(f BenchFunc, N int64) PerfResult {
+	runtime.GC()
+	mstats0 := new(runtime.MemStats)
+	runtime.ReadMemStats(mstats0)
+	res := PerfResult{N: N}
 	t0 := time.Now()
-	err := Benchmark(N)
+	var err error
+	res.Metrics, err = f(N)
+	res.RunTime = time.Since(t0)
 	if err != nil {
-		fmt.Printf("Benchmark function failed: %v\n", err)
-		os.Exit(1)
+		log.Fatalf("Benchmark function failed: %v\n", err)
 	}
-	res := Result{N: N}
-	res.RunTime = time.Since(t0)
+	mstats1 := new(runtime.MemStats)
+	runtime.ReadMemStats(mstats1)
+	fmt.Printf("%+v\n", *mstats1)
 	return res
 }
 
-func ChooseN(res *Result) bool {
+func ChooseN(res *PerfResult) bool {
 	const MaxN = 1e12
 	last := res.N
 	if last == 0 {
...
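
Note: the only machine-readable output of the new driver is the GOPERF-METRIC:name=value lines that PerfBenchmark prints; progress messages and the MemStats dump go to the log and plain stdout. As a rough sketch of how a consumer could pick those lines out of a benchmark binary's output (the ParseMetrics helper and this standalone filter program are hypothetical, not part of this repository):

package main

import (
	"bufio"
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"
)

// ParseMetrics extracts GOPERF-METRIC:name=value lines, the format
// printed by PerfBenchmark above, and returns them as a map.
// Lines without the prefix are ignored.
func ParseMetrics(output string) (map[string]int64, error) {
	const prefix = "GOPERF-METRIC:"
	metrics := make(map[string]int64)
	sc := bufio.NewScanner(strings.NewReader(output))
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if !strings.HasPrefix(line, prefix) {
			continue
		}
		kv := strings.SplitN(line[len(prefix):], "=", 2)
		if len(kv) != 2 {
			return nil, fmt.Errorf("malformed metric line: %q", line)
		}
		v, err := strconv.ParseInt(kv[1], 10, 64)
		if err != nil {
			return nil, fmt.Errorf("bad value in %q: %v", line, err)
		}
		metrics[kv[0]] = v
	}
	return metrics, sc.Err()
}

func main() {
	// Pipe the benchmark binary's output into this program.
	data, err := io.ReadAll(os.Stdin)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	metrics, err := ParseMetrics(string(data))
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(metrics)
}

For the SleepBenchmark sample above this would yield the default runtime metric plus foo=42.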