app/appengine: remove ancient performance/benchmarking code

This was the performance/benchmark code from ~three generations ago.

It's unused and unmaintained. It broke when we moved from mercurial to
git, IIRC.

I'm attempting to modernize this code (for golang/go#34744) but it'd
be easier if there's less code to deal with.

Updates golang/go#34744

Change-Id: Ib4999830b05df9ffad9b46964022325404350b47
Reviewed-on: https://go-review.googlesource.com/c/build/+/208319
Reviewed-by: Bryan C. Mills <bcmills@google.com>
Reviewed-by: Andrew Bonventre <andybons@golang.org>
This commit is contained in:
Brad Fitzpatrick 2019-11-21 20:40:42 +00:00
Родитель ff18cb3e05
Коммит cf930f68e4
17 изменённых файлов: 1 добавлений и 2507 удалений

Просмотреть файл

@ -5,7 +5,7 @@ handlers:
- url: /static
static_dir: app/appengine/static
secure: always
- url: /(init|buildtest|key|perflearn|_ah/queue/go/delay)
- url: /(init|buildtest|key|_ah/queue/go/delay)
script: auto
login: admin
secure: always

Просмотреть файл

@ -12,13 +12,10 @@ import (
"fmt"
"io"
"io/ioutil"
"net/http"
"sort"
"strconv"
"strings"
"time"
"golang.org/x/build/app/cache"
"golang.org/x/build/dashboard"
"golang.org/x/build/internal/loghash"
"google.golang.org/appengine/datastore"
@ -480,393 +477,6 @@ func (r *Result) Data() string {
return fmt.Sprintf("%v|%v|%v|%v", r.Builder, r.OK, r.LogHash, r.GoHash)
}
// A PerfResult describes all benchmarking result for a Commit.
// Descendant of Package.
type PerfResult struct {
PackagePath string
CommitHash string
CommitNum int
Data []string `datastore:",noindex"` // "builder|benchmark|ok|metric1=val1|metric2=val2|file:log=hash|file:cpuprof=hash"
// Local cache with parsed Data.
// Maps builder->benchmark->ParsedPerfResult.
parsedData map[string]map[string]*ParsedPerfResult
}
type ParsedPerfResult struct {
OK bool
Metrics map[string]uint64
Artifacts map[string]string
}
func (r *PerfResult) Key(c context.Context) *datastore.Key {
p := Package{Path: r.PackagePath}
key := r.CommitHash
return datastore.NewKey(c, "PerfResult", key, 0, p.Key(c))
}
// AddResult adds the benchmarking result req to r.
// An existing result for the same builder/benchmark pair is replaced.
// It reports whether a result for that pair was already present.
func (r *PerfResult) AddResult(req *PerfRequest) bool {
present := false
str := fmt.Sprintf("%v|%v|", req.Builder, req.Benchmark)
// Remove any existing entry for this builder/benchmark pair
// (swap-with-last delete; ordering of Data is not significant).
for i, s := range r.Data {
if strings.HasPrefix(s, str) {
present = true
last := len(r.Data) - 1
r.Data[i] = r.Data[last]
r.Data = r.Data[:last]
break
}
}
// Note the stored OK token is "ok" or "false" (not "true"/"false");
// ParseData compares against "ok".
ok := "ok"
if !req.OK {
ok = "false"
}
str += ok
// Append "|metric=value" pairs, then "|file:type=hash" artifact refs,
// matching the format documented on the PerfResult.Data field.
for _, m := range req.Metrics {
str += fmt.Sprintf("|%v=%v", m.Type, m.Val)
}
for _, a := range req.Artifacts {
str += fmt.Sprintf("|file:%v=%v", a.Type, a.Body)
}
r.Data = append(r.Data, str)
// Invalidate the lazily built parse cache.
r.parsedData = nil
return present
}
// ParseData parses r.Data into a builder -> benchmark -> result map.
// The parsed form is cached on r and reused until AddResult invalidates it.
func (r *PerfResult) ParseData() map[string]map[string]*ParsedPerfResult {
if r.parsedData != nil {
return r.parsedData
}
res := make(map[string]map[string]*ParsedPerfResult)
for _, str := range r.Data {
// Each entry: "builder|benchmark|ok|metric1=val1|...|file:log=hash|...".
ss := strings.Split(str, "|")
builder := ss[0]
bench := ss[1]
ok := ss[2]
m := res[builder]
if m == nil {
m = make(map[string]*ParsedPerfResult)
res[builder] = m
}
var p ParsedPerfResult
p.OK = ok == "ok"
p.Metrics = make(map[string]uint64)
p.Artifacts = make(map[string]string)
for _, entry := range ss[3:] {
if strings.HasPrefix(entry, "file:") {
// "file:type=hash" artifact reference.
ss1 := strings.Split(entry[len("file:"):], "=")
p.Artifacts[ss1[0]] = ss1[1]
} else {
// "metric=value" pair; an unparsable value is stored as 0.
ss1 := strings.Split(entry, "=")
val, _ := strconv.ParseUint(ss1[1], 10, 64)
p.Metrics[ss1[0]] = val
}
}
m[bench] = &p
}
r.parsedData = res
return res
}
// A PerfMetricRun entity holds a set of metric values for builder/benchmark/metric
// for commits [StartCommitNum, StartCommitNum + PerfRunLength).
// Descendant of Package.
type PerfMetricRun struct {
PackagePath string
Builder string
Benchmark string
Metric string // e.g. realtime, cputime, gc-pause
StartCommitNum int
Vals []int64 `datastore:",noindex"`
}
func (m *PerfMetricRun) Key(c context.Context) *datastore.Key {
p := Package{Path: m.PackagePath}
key := m.Builder + "|" + m.Benchmark + "|" + m.Metric + "|" + strconv.Itoa(m.StartCommitNum)
return datastore.NewKey(c, "PerfMetricRun", key, 0, p.Key(c))
}
// GetPerfMetricRun loads and returns PerfMetricRun that contains information
// for commit commitNum.
func GetPerfMetricRun(c context.Context, builder, benchmark, metric string, commitNum int) (*PerfMetricRun, error) {
startCommitNum := commitNum / PerfRunLength * PerfRunLength
m := &PerfMetricRun{Builder: builder, Benchmark: benchmark, Metric: metric, StartCommitNum: startCommitNum}
err := datastore.Get(c, m.Key(c), m)
if err != nil && err != datastore.ErrNoSuchEntity {
return nil, fmt.Errorf("getting PerfMetricRun: %v", err)
}
if len(m.Vals) != PerfRunLength {
m.Vals = make([]int64, PerfRunLength)
}
return m, nil
}
func (m *PerfMetricRun) AddMetric(c context.Context, commitNum int, v uint64) error {
if commitNum < m.StartCommitNum || commitNum >= m.StartCommitNum+PerfRunLength {
return fmt.Errorf("AddMetric: CommitNum %v out of range [%v, %v)",
commitNum, m.StartCommitNum, m.StartCommitNum+PerfRunLength)
}
m.Vals[commitNum-m.StartCommitNum] = int64(v)
if _, err := datastore.Put(c, m.Key(c), m); err != nil {
return fmt.Errorf("putting PerfMetricRun: %v", err)
}
return nil
}
// GetPerfMetricsForCommits returns perf metrics for builder/benchmark/metric
// and commits [startCommitNum, startCommitNum+n).
// Positions with no recorded metric are left as zero in the result.
func GetPerfMetricsForCommits(c context.Context, builder, benchmark, metric string, startCommitNum, n int) ([]uint64, error) {
if startCommitNum < 0 || n <= 0 {
return nil, fmt.Errorf("GetPerfMetricsForCommits: invalid args (%v, %v)", startCommitNum, n)
}
p := &Package{}
// Metric runs are stored in fixed-size chunks of PerfRunLength commits,
// so round the start down to a chunk boundary for the query.
t := datastore.NewQuery("PerfMetricRun").
Ancestor(p.Key(c)).
Filter("Builder =", builder).
Filter("Benchmark =", benchmark).
Filter("Metric =", metric).
Filter("StartCommitNum >=", startCommitNum/PerfRunLength*PerfRunLength).
Order("StartCommitNum").
Limit(100).
Run(c)
res := make([]uint64, n)
for {
metrics := new(PerfMetricRun)
_, err := t.Next(metrics)
if err == datastore.Done {
break
}
if err != nil {
return nil, err
}
// Past the requested window; done.
if metrics.StartCommitNum >= startCommitNum+n {
break
}
// Calculate start index for copying.
i := 0
if metrics.StartCommitNum < startCommitNum {
i = startCommitNum - metrics.StartCommitNum
}
// Calculate end index for copying.
e := PerfRunLength
if metrics.StartCommitNum+e > startCommitNum+n {
e = startCommitNum + n - metrics.StartCommitNum
}
for ; i < e; i++ {
res[metrics.StartCommitNum-startCommitNum+i] = uint64(metrics.Vals[i])
}
// A partially-copied chunk means the requested range is covered.
if e != PerfRunLength {
break
}
}
return res, nil
}
// PerfConfig holds read-mostly configuration related to benchmarking.
// There is only one PerfConfig entity.
type PerfConfig struct {
BuilderBench []string `datastore:",noindex"` // "builder|benchmark" pairs
BuilderProcs []string `datastore:",noindex"` // "builder|proc" pairs
BenchMetric []string `datastore:",noindex"` // "benchmark|metric" pairs
NoiseLevels []string `datastore:",noindex"` // "builder|benchmark|metric1=noise1|metric2=noise2"
// Local cache of "builder|benchmark|metric" -> noise.
noise map[string]float64
}
func PerfConfigKey(c context.Context) *datastore.Key {
p := Package{}
return datastore.NewKey(c, "PerfConfig", "PerfConfig", 0, p.Key(c))
}
const perfConfigCacheKey = "perf-config"
func GetPerfConfig(c context.Context, r *http.Request) (*PerfConfig, error) {
pc := new(PerfConfig)
now := cache.Now(c)
if cache.Get(c, r, now, perfConfigCacheKey, pc) {
return pc, nil
}
err := datastore.Get(c, PerfConfigKey(c), pc)
if err != nil && err != datastore.ErrNoSuchEntity {
return nil, fmt.Errorf("GetPerfConfig: %v", err)
}
cache.Set(c, r, now, perfConfigCacheKey, pc)
return pc, nil
}
// NoiseLevel returns the noise level (a percentage) for the given
// builder/benchmark/metric triple. The lookup map is built lazily from
// the packed NoiseLevels strings on first use.
func (pc *PerfConfig) NoiseLevel(builder, benchmark, metric string) float64 {
if pc.noise == nil {
pc.noise = make(map[string]float64)
// Each entry: "builder|benchmark|metric1=noise1|metric2=noise2|...".
for _, str := range pc.NoiseLevels {
split := strings.Split(str, "|")
builderBench := split[0] + "|" + split[1]
for _, entry := range split[2:] {
metricValue := strings.Split(entry, "=")
noise, _ := strconv.ParseFloat(metricValue[1], 64)
pc.noise[builderBench+"|"+metricValue[0]] = noise
}
}
}
me := fmt.Sprintf("%v|%v|%v", builder, benchmark, metric)
n := pc.noise[me]
if n == 0 {
// Use a very conservative value
// until we have learned the real noise level.
n = 200
}
return n
}
// UpdatePerfConfig updates the PerfConfig entity with results of benchmarking.
// Returns whether it's a benchmark that we have not yet seen on the builder.
func UpdatePerfConfig(c context.Context, r *http.Request, req *PerfRequest) (newBenchmark bool, err error) {
pc, err := GetPerfConfig(c, r)
if err != nil {
return false, err
}
modified := false
// add appends str to *arr only if it is not already present,
// flagging that the config must be re-persisted.
add := func(arr *[]string, str string) {
for _, s := range *arr {
if s == str {
return
}
}
*arr = append(*arr, str)
modified = true
return
}
// Benchmark names arrive as "Name-Procs" (e.g. "json-4");
// a missing suffix means GOMAXPROCS=1.
BenchProcs := strings.Split(req.Benchmark, "-")
benchmark := BenchProcs[0]
procs := "1"
if len(BenchProcs) > 1 {
procs = BenchProcs[1]
}
add(&pc.BuilderBench, req.Builder+"|"+benchmark)
// modified is true here only if the builder|benchmark pair was new.
newBenchmark = modified
add(&pc.BuilderProcs, req.Builder+"|"+procs)
for _, m := range req.Metrics {
add(&pc.BenchMetric, benchmark+"|"+m.Type)
}
if modified {
if _, err := datastore.Put(c, PerfConfigKey(c), pc); err != nil {
return false, fmt.Errorf("putting PerfConfig: %v", err)
}
// Invalidate cached copies of the config.
cache.Tick(c)
}
return newBenchmark, nil
}
// MetricList sorts metric names for display: ordinary metrics first
// (alphabetically), with build-/binary- metrics pushed to the end.
type MetricList []string

func (l MetricList) Len() int { return len(l) }

// Less orders non-build metrics before build-/binary- metrics;
// within each group the order is alphabetical.
func (l MetricList) Less(i, j int) bool {
	isBuild := func(s string) bool {
		return strings.HasPrefix(s, "build-") || strings.HasPrefix(s, "binary-")
	}
	bi, bj := isBuild(l[i]), isBuild(l[j])
	if bi != bj {
		return !bi
	}
	return l[i] < l[j]
}

func (l MetricList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func collectList(all []string, idx int, second string) (res []string) {
m := make(map[string]bool)
for _, str := range all {
ss := strings.Split(str, "|")
v := ss[idx]
v2 := ss[1-idx]
if (second == "" || second == v2) && !m[v] {
m[v] = true
res = append(res, v)
}
}
sort.Sort(MetricList(res))
return res
}
func (pc *PerfConfig) BuildersForBenchmark(bench string) []string {
return collectList(pc.BuilderBench, 0, bench)
}
func (pc *PerfConfig) BenchmarksForBuilder(builder string) []string {
return collectList(pc.BuilderBench, 1, builder)
}
func (pc *PerfConfig) MetricsForBenchmark(bench string) []string {
return collectList(pc.BenchMetric, 1, bench)
}
func (pc *PerfConfig) BenchmarkProcList() (res []string) {
bl := pc.BenchmarksForBuilder("")
pl := pc.ProcList("")
for _, b := range bl {
for _, p := range pl {
res = append(res, fmt.Sprintf("%v-%v", b, p))
}
}
return res
}
func (pc *PerfConfig) ProcList(builder string) []int {
ss := collectList(pc.BuilderProcs, 1, builder)
var procs []int
for _, s := range ss {
p, _ := strconv.ParseInt(s, 10, 32)
procs = append(procs, int(p))
}
sort.Ints(procs)
return procs
}
// A PerfTodo contains outstanding commits for benchmarking for a builder.
// Descendant of Package.
type PerfTodo struct {
PackagePath string // (empty for main repo commits)
Builder string
CommitNums []int `datastore:",noindex"` // LIFO queue of commits to benchmark.
}
func (todo *PerfTodo) Key(c context.Context) *datastore.Key {
p := Package{Path: todo.PackagePath}
key := todo.Builder
return datastore.NewKey(c, "PerfTodo", key, 0, p.Key(c))
}
// AddCommitToPerfTodo adds the commit to all existing PerfTodo entities.
func AddCommitToPerfTodo(c context.Context, com *Commit) error {
var todos []*PerfTodo
_, err := datastore.NewQuery("PerfTodo").
Ancestor((&Package{}).Key(c)).
GetAll(c, &todos)
if err != nil {
return fmt.Errorf("fetching PerfTodo's: %v", err)
}
for _, todo := range todos {
todo.CommitNums = append(todo.CommitNums, com.Num)
_, err = datastore.Put(c, todo.Key(c), todo)
if err != nil {
return fmt.Errorf("updating PerfTodo: %v", err)
}
}
return nil
}
// A Log is a gzip-compressed log file stored under the SHA1 hash of the
// uncompressed log text.
type Log struct {

Просмотреть файл

@ -26,7 +26,6 @@ func main() {
handleFunc("/clear-results", AuthHandler(clearResultsHandler))
handleFunc("/commit", AuthHandler(commitHandler))
handleFunc("/packages", AuthHandler(packagesHandler))
handleFunc("/perf-result", AuthHandler(perfResultHandler))
handleFunc("/result", AuthHandler(resultHandler))
handleFunc("/tag", AuthHandler(tagHandler))
handleFunc("/todo", AuthHandler(todoHandler))
@ -34,12 +33,8 @@ func main() {
// public handlers
handleFunc("/", uiHandler)
handleFunc("/log/", logHandler)
handleFunc("/perf", perfChangesHandler)
handleFunc("/perfdetail", perfDetailUIHandler)
handleFunc("/perfgraph", perfGraphHandler)
handleFunc("/updatebenchmark", updateBenchmark)
handleFunc("/buildtest", testHandler)
handleFunc("/perflearn", perfLearnHandler)
appengine.Main()
}

Просмотреть файл

@ -5,7 +5,6 @@
package main
import (
"bytes"
"context"
"crypto/hmac"
"crypto/md5"
@ -80,7 +79,6 @@ func commitHandler(r *http.Request) (interface{}, error) {
}
// Strip potentially large and unnecessary fields.
com.ResultData = nil
com.PerfResults = nil
return com, nil
}
if r.Method != "POST" {
@ -102,10 +100,6 @@ func commitHandler(r *http.Request) (interface{}, error) {
if err != nil {
return nil, fmt.Errorf("reading Body: %v", err)
}
if !bytes.Contains(body, needsBenchmarkingBytes) {
log.Warningf(c, "old builder detected at %v", r.RemoteAddr)
return nil, fmt.Errorf("rejecting old builder request, body does not contain %s: %q", needsBenchmarkingBytes, body)
}
if err := json.Unmarshal(body, com); err != nil {
return nil, fmt.Errorf("unmarshaling body %q: %v", body, err)
}
@ -120,8 +114,6 @@ func commitHandler(r *http.Request) (interface{}, error) {
return nil, datastore.RunInTransaction(c, tx, nil)
}
var needsBenchmarkingBytes = []byte(`"NeedsBenchmarking"`)
// addCommit adds the Commit entity to the datastore and updates the tip Tag.
// It must be run inside a datastore transaction.
func addCommit(c context.Context, com *Commit) error {
@ -209,25 +201,6 @@ func addCommit(c context.Context, com *Commit) error {
if err = putCommit(c, com); err != nil {
return err
}
if com.NeedsBenchmarking {
// add to CommitRun
cr, err := GetCommitRun(c, com.Num)
if err != nil {
return err
}
if err = cr.AddCommit(c, com); err != nil {
return err
}
// create PerfResult
res := &PerfResult{CommitHash: com.Hash, CommitNum: com.Num}
if _, err := datastore.Put(c, res.Key(c), res); err != nil {
return fmt.Errorf("putting PerfResult: %v", err)
}
// Update perf todo if necessary.
if err = AddCommitToPerfTodo(c, com); err != nil {
return err
}
}
return nil
}
@ -285,18 +258,10 @@ func todoHandler(r *http.Request) (interface{}, error) {
switch kind {
case "build-go-commit":
com, err = buildTodo(c, builder, "", "")
if com != nil {
com.PerfResults = []string{}
}
case "build-package":
packagePath := r.FormValue("packagePath")
goHash := r.FormValue("goHash")
com, err = buildTodo(c, builder, packagePath, goHash)
if com != nil {
com.PerfResults = []string{}
}
case "benchmark-go-commit":
com, err = perfTodo(c, builder)
}
if com != nil || err != nil {
if com != nil {
@ -396,129 +361,6 @@ func buildTodo(c context.Context, builder, packagePath, goHash string) (*Commit,
return nil, nil
}
// perfTodo returns the next Commit to be benchmarked (or nil if none available).
func perfTodo(c context.Context, builder string) (*Commit, error) {
p := &Package{}
todo := &PerfTodo{Builder: builder}
err := datastore.Get(c, todo.Key(c), todo)
if err != nil && err != datastore.ErrNoSuchEntity {
return nil, fmt.Errorf("fetching PerfTodo: %v", err)
}
if err == datastore.ErrNoSuchEntity {
todo, err = buildPerfTodo(c, builder)
if err != nil {
return nil, err
}
}
if len(todo.CommitNums) == 0 {
return nil, nil
}
// Have commit to benchmark, fetch it.
num := todo.CommitNums[len(todo.CommitNums)-1]
t := datastore.NewQuery("Commit").
Ancestor(p.Key(c)).
Filter("Num =", num).
Limit(1).
Run(c)
com := new(Commit)
if _, err := t.Next(com); err != nil {
return nil, err
}
if !com.NeedsBenchmarking {
return nil, fmt.Errorf("commit from perf todo queue is not intended for benchmarking")
}
// Remove benchmarks from other builders.
var benchs []string
for _, b := range com.PerfResults {
bb := strings.Split(b, "|")
if bb[0] == builder && bb[1] != "meta-done" {
benchs = append(benchs, bb[1])
}
}
com.PerfResults = benchs
return com, nil
}
// buildPerfTodo creates PerfTodo for the builder with all commits. In a transaction.
func buildPerfTodo(c context.Context, builder string) (*PerfTodo, error) {
todo := &PerfTodo{Builder: builder}
tx := func(c context.Context) error {
err := datastore.Get(c, todo.Key(c), todo)
if err != nil && err != datastore.ErrNoSuchEntity {
return fmt.Errorf("fetching PerfTodo: %v", err)
}
if err == nil {
return nil
}
t := datastore.NewQuery("CommitRun").
Ancestor((&Package{}).Key(c)).
Order("-StartCommitNum").
Run(c)
var nums []int
var releaseNums []int
loop:
for {
cr := new(CommitRun)
if _, err := t.Next(cr); err == datastore.Done {
break
} else if err != nil {
return fmt.Errorf("scanning commit runs for perf todo: %v", err)
}
for i := len(cr.Hash) - 1; i >= 0; i-- {
if !cr.NeedsBenchmarking[i] || cr.Hash[i] == "" {
continue // There's nothing to see here. Move along.
}
num := cr.StartCommitNum + i
for k, v := range knownTags {
// Releases are benchmarked first, because they are important (and there are few of them).
if cr.Hash[i] == v {
releaseNums = append(releaseNums, num)
if k == "go1" {
break loop // Point of no benchmark: test/bench/shootout: update timing.log to Go 1.
}
}
}
nums = append(nums, num)
}
}
todo.CommitNums = orderPerfTodo(nums)
todo.CommitNums = append(todo.CommitNums, releaseNums...)
if _, err = datastore.Put(c, todo.Key(c), todo); err != nil {
return fmt.Errorf("putting PerfTodo: %v", err)
}
return nil
}
return todo, datastore.RunInTransaction(c, tx, nil)
}
// removeCommitFromPerfTodo removes commit num from the builder's PerfTodo
// queue, if present. A missing PerfTodo entity is not an error.
func removeCommitFromPerfTodo(c context.Context, builder string, num int) error {
todo := &PerfTodo{Builder: builder}
err := datastore.Get(c, todo.Key(c), todo)
if err != nil && err != datastore.ErrNoSuchEntity {
return fmt.Errorf("fetching PerfTodo: %v", err)
}
if err == datastore.ErrNoSuchEntity {
return nil
}
// Scan from the tail (the queue is consumed LIFO, so the target is
// likely near the end), then shift the remainder left by one slot.
for i := len(todo.CommitNums) - 1; i >= 0; i-- {
if todo.CommitNums[i] == num {
for ; i < len(todo.CommitNums)-1; i++ {
todo.CommitNums[i] = todo.CommitNums[i+1]
}
todo.CommitNums = todo.CommitNums[:i]
_, err = datastore.Put(c, todo.Key(c), todo)
if err != nil {
return fmt.Errorf("putting PerfTodo: %v", err)
}
break
}
}
return nil
}
// packagesHandler returns a list of the non-Go Packages monitored
// by the dashboard.
func packagesHandler(r *http.Request) (interface{}, error) {
@ -617,204 +459,6 @@ func resultHandler(r *http.Request) (interface{}, error) {
return nil, datastore.RunInTransaction(c, tx, nil)
}
// perf-result request payload
type PerfRequest struct {
Builder string
Benchmark string
Hash string
OK bool
Metrics []PerfMetric
Artifacts []PerfArtifact
}
type PerfMetric struct {
Type string
Val uint64
}
type PerfArtifact struct {
Type string
Body string
}
// perfResultHandler records a benchmarking result.
// It expects a POST carrying a JSON-encoded PerfRequest body.
func perfResultHandler(r *http.Request) (interface{}, error) {
defer r.Body.Close()
if r.Method != "POST" {
return nil, errBadMethod(r.Method)
}
req := new(PerfRequest)
if err := json.NewDecoder(r.Body).Decode(req); err != nil {
return nil, fmt.Errorf("decoding Body: %v", err)
}
c := contextForRequest(r)
defer cache.Tick(c)
// Store the text files if supplied, replacing each artifact body
// with the hash under which the log was stored.
for i, a := range req.Artifacts {
hash, err := PutLog(c, a.Body)
if err != nil {
return nil, fmt.Errorf("putting Log: %v", err)
}
req.Artifacts[i].Body = hash
}
// All datastore mutations happen atomically inside the transaction.
tx := func(c context.Context) error {
return addPerfResult(c, r, req)
}
return nil, datastore.RunInTransaction(c, tx, nil)
}
// addPerfResult creates PerfResult and updates Commit, PerfTodo,
// PerfMetricRun and PerfConfig with the incoming benchmark result.
// MUST be called from inside a datastore transaction.
func addPerfResult(c context.Context, r *http.Request, req *PerfRequest) error {
	// Check that the main Package exists before recording anything.
	p, err := GetPackage(c, "")
	if err != nil {
		return fmt.Errorf("GetPackage: %v", err)
	}
	// Record the builder|benchmark pair on the Commit itself.
	com := &Commit{Hash: req.Hash}
	if err := com.AddPerfResult(c, req.Builder, req.Benchmark); err != nil {
		return fmt.Errorf("AddPerfResult: %v", err)
	}
	// Merge the result into the commit's PerfResult entity.
	res := &PerfResult{CommitHash: req.Hash}
	if err := datastore.Get(c, res.Key(c), res); err != nil {
		return fmt.Errorf("getting PerfResult: %v", err)
	}
	present := res.AddResult(req)
	if _, err := datastore.Put(c, res.Key(c), res); err != nil {
		return fmt.Errorf("putting PerfResult: %v", err)
	}
	// Meta-done denotes that there are no benchmarks left for this
	// builder/commit, so this is where analysis and notification happen.
	if req.Benchmark == "meta-done" {
		// Don't send duplicate emails for the same commit/builder.
		// And don't send emails about too old commits.
		if !present && com.Num >= p.NextNum-commitsPerPage {
			if err := checkPerfChanges(c, r, com, req.Builder, res); err != nil {
				return err
			}
		}
		// Fix: this previously did `return nil` inside the error branch,
		// silently swallowing the removeCommitFromPerfTodo failure.
		if err := removeCommitFromPerfTodo(c, req.Builder, com.Num); err != nil {
			return err
		}
		return nil
	}
	// Update PerfConfig with any newly seen builder/benchmark/metric.
	newBenchmark, err := UpdatePerfConfig(c, r, req)
	if err != nil {
		return fmt.Errorf("updating PerfConfig: %v", err)
	}
	if newBenchmark {
		// If this is a new benchmark on the builder, delete PerfTodo.
		// It will be recreated later with all commits again.
		todo := &PerfTodo{Builder: req.Builder}
		err = datastore.Delete(c, todo.Key(c))
		if err != nil && err != datastore.ErrNoSuchEntity {
			return fmt.Errorf("deleting PerfTodo: %v", err)
		}
	}
	// Fold each metric value into its PerfMetricRun time series.
	for _, metric := range req.Metrics {
		m, err := GetPerfMetricRun(c, req.Builder, req.Benchmark, metric.Type, com.Num)
		if err != nil {
			return fmt.Errorf("GetPerfMetrics: %v", err)
		}
		if err = m.AddMetric(c, com.Num, metric.Val); err != nil {
			return fmt.Errorf("AddMetric: %v", err)
		}
	}
	return nil
}
// checkPerfChanges analyzes the just-completed benchmarking run for builder at
// commit com: it sends failure-notification mail when the pass/fail state
// flips between neighboring benchmarked commits, and queues perf-change mail
// for significant metric deltas against the nearest comparable neighbors.
// MUST be called from inside a transaction.
func checkPerfChanges(c context.Context, r *http.Request, com *Commit, builder string, res *PerfResult) error {
pc, err := GetPerfConfig(c, r)
if err != nil {
return err
}
results := res.ParseData()[builder]
// Lazy iterators over results newer/older than com.
rcNewer := MakePerfResultCache(c, com, true)
rcOlder := MakePerfResultCache(c, com, false)
// Check whether we need to send failure notification email.
if results["meta-done"].OK {
// This one is successful, see if the next is failed.
nextRes, err := rcNewer.Next(com.Num)
if err != nil {
return err
}
if nextRes != nil && isPerfFailed(nextRes, builder) {
// NOTE(review): sendPerfFailMail returns an error that is ignored
// here and below — confirm best-effort notification is intended.
sendPerfFailMail(c, builder, nextRes)
}
} else {
// This one is failed, see if the previous is successful.
prevRes, err := rcOlder.Next(com.Num)
if err != nil {
return err
}
if prevRes != nil && !isPerfFailed(prevRes, builder) {
sendPerfFailMail(c, builder, res)
}
}
// Now see if there are any performance changes.
// Find the previous and the next results for performance comparison.
prevRes, err := rcOlder.NextForComparison(com.Num, builder)
if err != nil {
return err
}
nextRes, err := rcNewer.NextForComparison(com.Num, builder)
if err != nil {
return err
}
if results["meta-done"].OK {
// This one is successful, compare with a previous one.
if prevRes != nil {
if err := comparePerfResults(c, pc, builder, prevRes, res); err != nil {
return err
}
}
// Compare a next one with the current.
if nextRes != nil {
if err := comparePerfResults(c, pc, builder, res, nextRes); err != nil {
return err
}
}
} else {
// This one is failed, compare a previous one with a next one.
if prevRes != nil && nextRes != nil {
if err := comparePerfResults(c, pc, builder, prevRes, nextRes); err != nil {
return err
}
}
}
return nil
}
func comparePerfResults(c context.Context, pc *PerfConfig, builder string, prevRes, res *PerfResult) error {
changes := significantPerfChanges(pc, builder, prevRes, res)
if len(changes) == 0 {
return nil
}
com := &Commit{Hash: res.CommitHash}
if err := datastore.Get(c, com.Key(c), com); err != nil {
return fmt.Errorf("getting commit %v: %v", com.Hash, err)
}
sendPerfMailLater.Call(c, com, prevRes.CommitHash, builder, changes) // add task to queue
return nil
}
// logHandler displays log text for a given hash.
// It handles paths like "/log/hash".
func logHandler(w http.ResponseWriter, r *http.Request) {

Просмотреть файл

@ -12,7 +12,6 @@ import (
"io/ioutil"
"net/http"
"runtime"
"sort"
"strings"
"text/template"
@ -197,26 +196,6 @@ func postGerritMessage(c context.Context, com *Commit, message string) error {
return nil
}
// MUST be called from inside a transaction.
func sendPerfFailMail(c context.Context, builder string, res *PerfResult) error {
com := &Commit{Hash: res.CommitHash}
if err := datastore.Get(c, com.Key(c), com); err != nil {
return err
}
logHash := ""
parsed := res.ParseData()
for _, data := range parsed[builder] {
if !data.OK {
logHash = data.Artifacts["log"]
break
}
}
if logHash == "" {
return fmt.Errorf("can not find failed result for commit %v on builder %v", com.Hash, builder)
}
return commonNotify(c, com, builder, logHash)
}
// commonNotify MUST!!! be called from within a transaction inside which
// the provided Commit entity was retrieved from the datastore.
func commonNotify(c context.Context, com *Commit, builder, logHash string) error {
@ -235,83 +214,3 @@ func commonNotify(c context.Context, com *Commit, builder, logHash string) error
com.FailNotificationSent = true
return putCommit(c, com)
}
type PerfChangeBenchmark struct {
Name string
Metrics []*PerfChangeMetric
}
type PerfChangeMetric struct {
Name string
Old uint64
New uint64
Delta float64
}
type PerfChangeBenchmarkSlice []*PerfChangeBenchmark
func (l PerfChangeBenchmarkSlice) Len() int { return len(l) }
func (l PerfChangeBenchmarkSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l PerfChangeBenchmarkSlice) Less(i, j int) bool {
b1, p1 := splitBench(l[i].Name)
b2, p2 := splitBench(l[j].Name)
if b1 != b2 {
return b1 < b2
}
return p1 < p2
}
type PerfChangeMetricSlice []*PerfChangeMetric
func (l PerfChangeMetricSlice) Len() int { return len(l) }
func (l PerfChangeMetricSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l PerfChangeMetricSlice) Less(i, j int) bool { return l[i].Name < l[j].Name }
var (
sendPerfMailLater = delay.Func("sendPerfMail", sendPerfMailFunc)
sendPerfMailTmpl = template.Must(
template.New("perf_notify.txt").
Funcs(template.FuncMap(tmplFuncs)).
ParseFiles(templateFile("perf_notify.txt")),
)
)
func sendPerfMailFunc(c context.Context, com *Commit, prevCommitHash, builder string, changes []*PerfChange) {
// Sort the changes into the right order.
var benchmarks []*PerfChangeBenchmark
for _, ch := range changes {
// Find the benchmark.
var b *PerfChangeBenchmark
for _, b1 := range benchmarks {
if b1.Name == ch.Bench {
b = b1
break
}
}
if b == nil {
b = &PerfChangeBenchmark{Name: ch.Bench}
benchmarks = append(benchmarks, b)
}
b.Metrics = append(b.Metrics, &PerfChangeMetric{Name: ch.Metric, Old: ch.Old, New: ch.New, Delta: ch.Diff})
}
for _, b := range benchmarks {
sort.Sort(PerfChangeMetricSlice(b.Metrics))
}
sort.Sort(PerfChangeBenchmarkSlice(benchmarks))
u := fmt.Sprintf("https://%v/perfdetail?commit=%v&commit0=%v&kind=builder&builder=%v", domain, com.Hash, prevCommitHash, builder)
// Prepare mail message (without Commit, for updateCL).
var body bytes.Buffer
err := sendPerfMailTmpl.Execute(&body, map[string]interface{}{
"Builder": builder, "Hostname": domain, "Url": u, "Benchmarks": benchmarks,
})
if err != nil {
log.Errorf(c, "rendering perf mail template: %v", err)
return
}
if err := postGerritMessage(c, com, body.String()); err != nil {
log.Errorf(c, "posting to gerrit: %v", err)
}
}

Просмотреть файл

@ -1,312 +0,0 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"context"
"fmt"
"sort"
"strconv"
"strings"
"google.golang.org/appengine/datastore"
"google.golang.org/appengine/log"
)
var knownTags = map[string]string{
"go1": "0051c7442fed9c888de6617fa9239a913904d96e",
"go1.1": "d29da2ced72ba2cf48ed6a8f1ec4abc01e4c5bf1",
"go1.2": "b1edf8faa5d6cbc50c6515785df9df9c19296564",
"go1.3": "f153208c0a0e306bfca14f71ef11f09859ccabc8",
"go1.4": "faa3ed1dc30e42771a68b6337dcf8be9518d5c07",
}
var lastRelease = "go1.4"
// splitBench splits a "Benchmark-N" name into the benchmark name and the
// GOMAXPROCS value N. A name with no "-N" suffix (or a non-numeric suffix)
// yields procs == 0; previously a missing suffix caused an index-out-of-range
// panic on ss[1].
func splitBench(benchProcs string) (string, int) {
	ss := strings.Split(benchProcs, "-")
	if len(ss) < 2 {
		return ss[0], 0
	}
	procs, _ := strconv.Atoi(ss[1])
	return ss[0], procs
}
func dashPerfCommits(c context.Context, page int) ([]*Commit, error) {
q := datastore.NewQuery("Commit").
Ancestor((&Package{}).Key(c)).
Order("-Num").
Filter("NeedsBenchmarking =", true).
Limit(commitsPerPage).
Offset(page * commitsPerPage)
var commits []*Commit
_, err := q.GetAll(c, &commits)
if err == nil && len(commits) == 0 {
err = fmt.Errorf("no commits")
}
return commits, err
}
// perfChangeStyle classifies the perf delta v (a percentage) for display:
// "noise" if it falls within the learned noise band for
// builder/benchmark/metric, "bad" for a positive delta, "good" otherwise.
func perfChangeStyle(pc *PerfConfig, v float64, builder, benchmark, metric string) string {
noise := pc.NoiseLevel(builder, benchmark, metric)
if isNoise(v, noise) {
return "noise"
}
if v > 0 {
return "bad"
}
return "good"
}
// isNoise reports whether the perf delta diff (a percentage) lies inside
// the noise band for the given noise level.
func isNoise(diff, noise float64) bool {
	// The lower bound mirrors the upper one on a ratio scale: a +X%
	// threshold corresponds to -100*X/(X+100) in the other direction.
	lower := -100 * noise / (noise + 100)
	return lower < diff && diff < noise
}
// perfDiff returns the relative difference between old and new as a
// percentage: positive when new > old (e.g. old=100, new=110 yields 10).
// The expression is kept as-is so float rounding matches historical data.
func perfDiff(old, new uint64) float64 {
return 100*float64(new)/float64(old) - 100
}
// isPerfFailed reports whether the benchmarking run recorded in res for
// builder failed, i.e. its "meta-done" entry exists and is not OK.
func isPerfFailed(res *PerfResult, builder string) bool {
data := res.ParseData()[builder]
return data != nil && data["meta-done"] != nil && !data["meta-done"].OK
}
// PerfResultCache caches a set of PerfResults so that it's easy to access them
// without lots of duplicate accesses to datastore.
// It allows to iterate over newer or older results for some base commit.
type PerfResultCache struct {
c context.Context
newer bool
iter *datastore.Iterator
results map[int]*PerfResult
}
func MakePerfResultCache(c context.Context, com *Commit, newer bool) *PerfResultCache {
p := &Package{}
q := datastore.NewQuery("PerfResult").Ancestor(p.Key(c)).Limit(100)
if newer {
q = q.Filter("CommitNum >=", com.Num).Order("CommitNum")
} else {
q = q.Filter("CommitNum <=", com.Num).Order("-CommitNum")
}
rc := &PerfResultCache{c: c, newer: newer, iter: q.Run(c), results: make(map[int]*PerfResult)}
return rc
}
// Get returns the PerfResult for commitNum, or nil if none exists.
// NOTE(review): the error from Next is deliberately dropped here, so a
// datastore failure is indistinguishable from a missing result — confirm
// callers are fine with that.
func (rc *PerfResultCache) Get(commitNum int) *PerfResult {
rc.Next(commitNum) // fetch the commit, if necessary
return rc.results[commitNum]
}
// Next returns the next PerfResult for the commit commitNum.
// It does not care whether the result has any data, failed or whatever.
func (rc *PerfResultCache) Next(commitNum int) (*PerfResult, error) {
// See if we have next result in the cache.
next := -1
for ci := range rc.results {
if rc.newer {
if ci > commitNum && (next == -1 || ci < next) {
next = ci
}
} else {
if ci < commitNum && (next == -1 || ci > next) {
next = ci
}
}
}
if next != -1 {
return rc.results[next], nil
}
// Fetch next result from datastore.
res := new(PerfResult)
_, err := rc.iter.Next(res)
if err == datastore.Done {
return nil, nil
}
if err != nil {
return nil, fmt.Errorf("fetching perf results: %v", err)
}
if (rc.newer && res.CommitNum < commitNum) || (!rc.newer && res.CommitNum > commitNum) {
log.Errorf(rc.c, "PerfResultCache.Next: bad commit num")
}
rc.results[res.CommitNum] = res
return res, nil
}
// NextForComparison returns the PerfResult which we need to use for
// performance comparison.
// It skips failed results, but does not skip results with no data.
func (rc *PerfResultCache) NextForComparison(commitNum int, builder string) (*PerfResult, error) {
for {
res, err := rc.Next(commitNum)
if err != nil {
return nil, err
}
if res == nil {
return nil, nil
}
// Skip the base commit itself; we want a neighbor.
if res.CommitNum == commitNum {
continue
}
parsed := res.ParseData()
if builder != "" {
// Comparing for a particular builder.
// This is used in perf_changes and in email notifications.
b := parsed[builder]
if b == nil || b["meta-done"] == nil {
// No results yet, must not do the comparison.
return nil, nil
}
if b["meta-done"].OK {
// Have complete results, compare.
return res, nil
}
} else {
// Comparing for all builders, find a result with at least
// one successful meta-done.
// This is used in perf_detail.
for _, benchs := range parsed {
if data := benchs["meta-done"]; data != nil && data.OK {
return res, nil
}
}
}
// Failed, try next result.
commitNum = res.CommitNum
}
}
// PerfChange describes one significant performance change: metric Metric
// of benchmark Bench on builder Builder moved from Old to New, a relative
// difference of Diff percent. Positive diffs are rendered as regressions
// ("bad") by the UI, negative ones as improvements ("good").
type PerfChange struct {
	Builder string
	Bench   string
	Metric  string
	Old     uint64
	New     uint64
	Diff    float64
}
// significantPerfChanges compares prevRes with res and returns the metric
// changes that exceed the configured noise level. If builder is non-empty,
// only that builder's results are considered; otherwise all builders are.
func significantPerfChanges(pc *PerfConfig, builder string, prevRes, res *PerfResult) (changes []*PerfChange) {
	// First, collect all significant changes.
	for builder1, benchmarks1 := range res.ParseData() {
		if builder != "" && builder != builder1 {
			// This is not the builder you're looking for, Luke.
			continue
		}
		benchmarks0 := prevRes.ParseData()[builder1]
		if benchmarks0 == nil {
			continue
		}
		for benchmark, data1 := range benchmarks1 {
			data0 := benchmarks0[benchmark]
			if data0 == nil {
				continue
			}
			for metric, val := range data1.Metrics {
				val0 := data0.Metrics[metric]
				if val0 == 0 {
					// No baseline value; cannot compute a diff.
					continue
				}
				diff := perfDiff(val0, val)
				// NOTE(review): this uses the builder argument (possibly "")
				// rather than builder1, so when comparing all builders the
				// noise level for builder "" is used and Builder: "" is
				// recorded on the change — confirm this is intended.
				noise := pc.NoiseLevel(builder, benchmark, metric)
				if isNoise(diff, noise) {
					continue
				}
				ch := &PerfChange{Builder: builder, Bench: benchmark, Metric: metric, Old: val0, New: val, Diff: diff}
				changes = append(changes, ch)
			}
		}
	}
	// Then, strip non-repeatable changes (flakes).
	// The hypothesis is that a real change must show up with the majority of GOMAXPROCS values.
	majority := len(pc.ProcList(builder))/2 + 1
	// Count, per benchmark|metric key, how many GOMAXPROCS variants changed;
	// negative diffs are counted under a separate "--"-suffixed key.
	cnt := make(map[string]int)
	for _, ch := range changes {
		b, _ := splitBench(ch.Bench)
		name := b + "|" + ch.Metric
		if ch.Diff < 0 {
			name += "--"
		}
		cnt[name] = cnt[name] + 1
	}
	for i := 0; i < len(changes); i++ {
		ch := changes[i]
		b, _ := splitBench(ch.Bench)
		name := b + "|" + ch.Metric
		// Keep the change if either direction reached the majority.
		if cnt[name] >= majority {
			continue
		}
		if cnt[name+"--"] >= majority {
			continue
		}
		// Remove flake: swap in the last element and re-examine index i.
		last := len(changes) - 1
		changes[i] = changes[last]
		changes = changes[:last]
		i--
	}
	return changes
}
// orderPerfTodo reorders commit nums for benchmarking todo.
// The resulting order is somewhat tricky. We want 2 things:
// 1. benchmark sequentially backwards (this provides information about most
// recent changes, and allows to estimate noise levels)
// 2. benchmark old commits in "scatter" order (this allows to quickly gather
// brief information about thousands of old commits)
// So this function interleaves the two orders.
// The input slice is sorted in place; the returned slice is a permutation
// of the input.
func orderPerfTodo(nums []int) []int {
	sort.Ints(nums)
	n := len(nums)
	pow2 := uint32(0) // power-of-two exponent such that 1<<pow2 > n
	npow2 := 0
	for npow2 <= n {
		pow2++
		npow2 = 1 << pow2
	}
	res := make([]int, n)
	resPos := n - 1            // result array is filled backwards
	present := make([]bool, n) // denotes values that are already present in result array
	for i0, i1 := n-1, 0; i0 >= 0 || i1 < npow2; {
		// i0 represents the "benchmark sequentially backwards" sequence:
		// take the next two not-yet-present commits, newest first.
		for cnt := 0; cnt < 2; cnt++ {
			for ; i0 >= 0; i0-- {
				if !present[i0] {
					present[i0] = true
					res[resPos] = nums[i0]
					resPos--
					i0--
					break
				}
			}
		}
		// i1 represents the "scatter order" sequence:
		// find the next commit that is not yet present and add it.
		for ; i1 < npow2; i1++ {
			// Do the "recursive split-ordering" trick: visiting indices in
			// bit-reversed order makes each new index land in the largest
			// remaining gap.
			idx := 0 // bitwise reverse of i1
			for j := uint32(0); j <= pow2; j++ {
				if (i1 & (1 << j)) != 0 {
					idx = idx | (1 << (pow2 - j - 1))
				}
			}
			if idx < n && !present[idx] {
				present[idx] = true
				res[resPos] = nums[idx]
				resPos--
				i1++
				break
			}
		}
	}
	// The above can't possibly be correct. Do a dumb sanity check:
	// the result must be a permutation of the (sorted) input.
	res2 := make([]int, n)
	copy(res2, res)
	sort.Ints(res2)
	for i := range res2 {
		if res2[i] != nums[i] {
			panic(fmt.Sprintf("diff at %v: have %v, want %v\nwas: %v\nbecome: %v",
				i, res2[i], nums[i], nums, res2))
		}
	}
	return res
}

Просмотреть файл

@ -1,276 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"html/template"
"net/http"
"sort"
"strconv"
"google.golang.org/appengine"
"google.golang.org/appengine/datastore"
)
// perfSummaryHandler draws the main benchmarking page.
// perfChangesHandler draws the main benchmarking page: one row per commit
// with its significant performance changes, paginated via ?page=N.
// On page 0 an extra synthetic row compares tip with the last release.
func perfChangesHandler(w http.ResponseWriter, r *http.Request) {
	d := goDash
	c := d.Context(appengine.NewContext(r))
	page, _ := strconv.Atoi(r.FormValue("page"))
	if page < 0 {
		page = 0
	}
	pc, err := GetPerfConfig(c, r)
	if err != nil {
		logErr(w, r, err)
		return
	}
	commits, err := dashPerfCommits(c, page)
	if err != nil {
		logErr(w, r, err)
		return
	}
	if len(commits) == 0 {
		// Page beyond the last commit (or an empty datastore); previously
		// this fell through to commits[0] and panicked.
		logErr(w, r, fmt.Errorf("no commits to display for page %v", page))
		return
	}
	// Fetch PerfResult's for the commits.
	var uiCommits []*perfChangesCommit
	rc := MakePerfResultCache(c, commits[0], false)
	// But first compare tip with the last release.
	if page == 0 {
		res0 := &PerfResult{CommitHash: knownTags[lastRelease]}
		// The Get error must stay in scope: it is re-examined below to
		// decide whether the release actually has results. (It used to be
		// shadowed inside the if statement, so the ErrNoSuchEntity check
		// below accidentally tested the unrelated outer err.)
		err := datastore.Get(c, res0.Key(c), res0)
		if err != nil && err != datastore.ErrNoSuchEntity {
			logErr(w, r, fmt.Errorf("getting PerfResult: %v", err))
			return
		}
		if err != datastore.ErrNoSuchEntity {
			uiCom, err := handleOneCommit(pc, commits[0], rc, res0)
			if err != nil {
				logErr(w, r, err)
				return
			}
			uiCom.IsSummary = true
			uiCom.ParentHash = lastRelease
			uiCommits = append(uiCommits, uiCom)
		}
	}
	for _, com := range commits {
		uiCom, err := handleOneCommit(pc, com, rc, nil)
		if err != nil {
			logErr(w, r, err)
			return
		}
		uiCommits = append(uiCommits, uiCom)
	}
	// Pagination links: a full page implies there may be an older page.
	p := &Pagination{}
	if len(commits) == commitsPerPage {
		p.Next = page + 1
	}
	if page > 0 {
		p.Prev = page - 1
		p.HasPrev = true
	}
	data := &perfChangesData{d, p, uiCommits}
	var buf bytes.Buffer
	if err := perfChangesTemplate.Execute(&buf, data); err != nil {
		logErr(w, r, err)
		return
	}
	buf.WriteTo(w)
}
// handleOneCommit builds the UI row for one commit: it compares the
// commit's perf results against baseRes (if non-nil) or against the
// previous comparable commit, and groups significant changes by metric.
func handleOneCommit(pc *PerfConfig, com *Commit, rc *PerfResultCache, baseRes *PerfResult) (*perfChangesCommit, error) {
	uiCom := new(perfChangesCommit)
	uiCom.Commit = com
	res1 := rc.Get(com.Num)
	for builder, benchmarks1 := range res1.ParseData() {
		for benchmark, data1 := range benchmarks1 {
			// "meta-done" is a per-builder completion marker, not a real
			// benchmark, so it only counts as a result when it failed.
			if benchmark != "meta-done" || !data1.OK {
				uiCom.NumResults++
			}
			if !data1.OK {
				// Render the failure as a pseudo-change under the synthetic
				// "failure" metric. diff=10000 sorts it ahead of real changes.
				v := new(perfChangesChange)
				v.diff = 10000
				v.Style = "fail"
				v.Builder = builder
				v.Link = fmt.Sprintf("log/%v", data1.Artifacts["log"])
				v.Val = builder
				v.Hint = builder
				if benchmark != "meta-done" {
					v.Hint += "/" + benchmark
				}
				m := findMetric(uiCom, "failure")
				m.BadChanges = append(m.BadChanges, v)
			}
		}
		res0 := baseRes
		if res0 == nil {
			// No fixed baseline: compare against the previous commit that
			// has complete results for this builder.
			var err error
			res0, err = rc.NextForComparison(com.Num, builder)
			if err != nil {
				return nil, err
			}
			if res0 == nil {
				continue
			}
		}
		changes := significantPerfChanges(pc, builder, res0, res1)
		changes = dedupPerfChanges(changes)
		for _, ch := range changes {
			v := new(perfChangesChange)
			v.Builder = builder
			v.Benchmark, v.Procs = splitBench(ch.Bench)
			v.diff = ch.Diff
			v.Val = fmt.Sprintf("%+.2f%%", ch.Diff)
			v.Hint = fmt.Sprintf("%v/%v", builder, ch.Bench)
			v.Link = fmt.Sprintf("perfdetail?commit=%v&commit0=%v&builder=%v&benchmark=%v", com.Hash, res0.CommitHash, builder, v.Benchmark)
			m := findMetric(uiCom, ch.Metric)
			// Positive diff = regression ("bad"), negative = improvement.
			if v.diff > 0 {
				v.Style = "bad"
				m.BadChanges = append(m.BadChanges, v)
			} else {
				v.Style = "good"
				m.GoodChanges = append(m.GoodChanges, v)
			}
		}
	}
	// Sort metrics and changes.
	for _, m := range uiCom.Metrics {
		sort.Sort(m.GoodChanges)
		sort.Sort(m.BadChanges)
	}
	sort.Sort(uiCom.Metrics)
	// Need at least one metric for UI.
	if len(uiCom.Metrics) == 0 {
		uiCom.Metrics = append(uiCom.Metrics, &perfChangesMetric{})
	}
	uiCom.Metrics[0].First = true
	return uiCom, nil
}
// dedupPerfChanges keeps, for every benchmark/metric pair, only the
// builder-procs entry with the largest absolute diff and drops the rest.
func dedupPerfChanges(changes []*PerfChange) (deduped []*PerfChange) {
	maxDiff := make(map[string]float64)
	maxBench := make(map[string]string)
	// Pass 1: record the winner (largest |Diff|) per benchmark|metric key.
	for _, ch := range changes {
		bench, _ := splitBench(ch.Bench)
		key := bench + "|" + ch.Metric
		mag := ch.Diff
		if mag < 0 {
			mag = -mag
		}
		if mag > maxDiff[key] {
			maxDiff[key] = mag
			maxBench[key] = ch.Builder + "|" + ch.Bench
		}
	}
	// Pass 2: keep only the recorded winners, preserving input order.
	for _, ch := range changes {
		bench, _ := splitBench(ch.Bench)
		if maxBench[bench+"|"+ch.Metric] == ch.Builder+"|"+ch.Bench {
			deduped = append(deduped, ch)
		}
	}
	return
}
// findMetric returns the metric entry named metric on commit c,
// appending and returning a fresh one if none exists yet.
func findMetric(c *perfChangesCommit, metric string) *perfChangesMetric {
	for _, existing := range c.Metrics {
		if existing.Name == metric {
			return existing
		}
	}
	created := &perfChangesMetric{Name: metric}
	c.Metrics = append(c.Metrics, created)
	return created
}
// uiPerfConfig holds the selectable options (builders, benchmarks, metrics,
// GOMAXPROCS values and commit-range endpoints) rendered as form controls
// on the perf pages.
type uiPerfConfig struct {
	Builders    []uiPerfConfigElem
	Benchmarks  []uiPerfConfigElem
	Metrics     []uiPerfConfigElem
	Procs       []uiPerfConfigElem
	CommitsFrom []uiPerfConfigElem
	CommitsTo   []uiPerfConfigElem
}

// uiPerfConfigElem is one option in a selection control.
type uiPerfConfigElem struct {
	Name     string
	Selected bool // whether the option is currently selected
}

// perfChangesTemplate renders the perf changes page (perf_changes.html).
var perfChangesTemplate = template.Must(
	template.New("perf_changes.html").Funcs(tmplFuncs).ParseFiles(templateFile("perf_changes.html")),
)
// perfChangesData is the root template data for perf_changes.html.
type perfChangesData struct {
	Dashboard  *Dashboard
	Pagination *Pagination
	Commits    []*perfChangesCommit
}

// perfChangesCommit is one row of the perf changes page.
type perfChangesCommit struct {
	*Commit
	IsSummary  bool // true for the synthetic "tip vs last release" row
	NumResults int
	Metrics    perfChangesMetricSlice
}

// perfChangesMetric groups the changes of one metric for a commit.
type perfChangesMetric struct {
	Name        string
	First       bool // marks the commit's first metric (starts its row group)
	BadChanges  perfChangesChangeSlice
	GoodChanges perfChangesChangeSlice
}

// perfChangesChange is a single rendered change cell.
type perfChangesChange struct {
	Builder   string
	Benchmark string
	Link      string
	Hint      string
	Style     string
	Val       string
	Procs     int
	diff      float64 // signed percentage, used only for sorting
}

// perfChangesMetricSlice sorts metrics alphabetically, except that the
// synthetic "failure" metric always sorts first.
type perfChangesMetricSlice []*perfChangesMetric

func (l perfChangesMetricSlice) Len() int      { return len(l) }
func (l perfChangesMetricSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l perfChangesMetricSlice) Less(i, j int) bool {
	if l[i].Name == "failure" || l[j].Name == "failure" {
		return l[i].Name == "failure"
	}
	return l[i].Name < l[j].Name
}

// perfChangesChangeSlice sorts changes by descending magnitude of diff.
// A slice is expected to hold only all-positive or all-negative diffs
// (good and bad changes live in separate slices); mixed signs panic.
type perfChangesChangeSlice []*perfChangesChange

func (l perfChangesChangeSlice) Len() int      { return len(l) }
func (l perfChangesChangeSlice) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l perfChangesChangeSlice) Less(i, j int) bool {
	vi, vj := l[i].diff, l[j].diff
	if vi > 0 && vj > 0 {
		return vi > vj
	} else if vi < 0 && vj < 0 {
		return vi < vj
	} else {
		// Deliberate assertion: callers keep the signs separated.
		panic("comparing positive and negative diff")
	}
}

Просмотреть файл

@ -1,89 +0,0 @@
<!doctype html>
{{/* perf_changes.html: one table row group per commit; for each metric the
     bad (regressions) and good (improvements) change cells are rendered
     side by side, with pagination links at the bottom. Template comments
     like this one are stripped at render time. */}}
<html>
<head>
<title>{{$.Dashboard.Name}} Dashboard</title>
<link rel="stylesheet" href="/static/style.css"/>
</head>
<body>
<header id="topbar">
<h1>Go Dashboard</h1>
<nav>
<a href="{{$.Dashboard.Prefix}}/">Test</a>
<a href="{{$.Dashboard.Prefix}}/perf">Perf</a>
<a href="{{$.Dashboard.Prefix}}/perfgraph">Graphs</a>
</nav>
<div class="clear"></div>
</header>
<div class="page">
<div class="build-container">
<table class="build">
<colgroup class="col-hash"></colgroup>
<colgroup class="col-numresults"></colgroup>
<colgroup class="col-metric"></colgroup>
<colgroup class="col-result"></colgroup>
<colgroup class="col-result"></colgroup>
<colgroup class="col-user"></colgroup>
<colgroup class="col-time"></colgroup>
<colgroup class="col-desc"></colgroup>
<tbody>
{{range $c := $.Commits}}
{{range $m := $c.Metrics}}
{{if $m.First}}
<tr class="row-commit">
{{if $c.IsSummary}}
<td class="hash">tip vs {{$c.ParentHash}}</td>
{{else}}
<td class="hash"><a href="https://go-review.googlesource.com/q/{{$c.Hash}}">{{shortHash $c.Hash}}</a></td>
{{end}}
<td class="numresults">{{$c.NumResults}}</td>
{{else}}
<tr>
<td class="user">&nbsp;</td>
<td class="numresults">&nbsp;</td>
{{end}}
<td>{{$m.Name}}</td>
<td>
{{range $ch := $m.BadChanges}}
<a class="{{$ch.Style}}" href="{{$ch.Link}}" title="{{$ch.Hint}}">{{$ch.Val}}</a> &nbsp;
{{end}}
</td>
<td>
{{range $ch := $m.GoodChanges}}
<a class="{{$ch.Style}}" href="{{$ch.Link}}" title="{{$ch.Hint}}">{{$ch.Val}}</a> &nbsp;
{{end}}
</td>
{{if $m.First}}
<td class="user" title="{{$c.User}}">{{shortUser $c.User}}</td>
<td class="time">{{$c.Time.Format "Mon 02 Jan 15:04"}}</td>
<td class="desc" title="{{$c.Desc}}">{{shortDesc $c.Desc}}</td>
{{else}}
<td class="user">&nbsp;</td>
<td class="time">&nbsp;</td>
<td class="desc">&nbsp;</td>
{{end}}
</tr>
{{end}}
{{if $c.IsSummary}}
<tr class="row-commit"><td>---</td></tr>
{{end}}
{{end}}
</tbody>
</table>
{{with $.Pagination}}
<div class="paginate">
<nav>
<a {{if .HasPrev}}href="?page={{.Prev}}"{{else}}class="inactive"{{end}}>newer</a>
<a {{if .Next}}href="?page={{.Next}}"{{else}}class="inactive"{{end}}>older</a>
<a {{if .HasPrev}}href="?"{{else}}class="inactive"{{end}}>latest</a>
<a href="https://golang.org/wiki/PerfDashboard">Help</a>
</nav>
</div>
{{end}}
</div>
<div class="clear"></div>
</div>
</body>
</html>

Просмотреть файл

@ -1,213 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"html/template"
"net/http"
"sort"
"strconv"
"strings"
"google.golang.org/appengine"
"google.golang.org/appengine/datastore"
)
// perfDetailUIHandler draws the perf detail page: an old-vs-new metric
// comparison for either all benchmarks of one builder (kind=builder) or
// all builders of one benchmark (kind=benchmark). Commits may be given as
// hashes or known release tags; if commit0 is absent, the previous commit
// with comparable results is used.
func perfDetailUIHandler(w http.ResponseWriter, r *http.Request) {
	d := goDash
	c := d.Context(appengine.NewContext(r))
	pc, err := GetPerfConfig(c, r)
	if err != nil {
		logErr(w, r, err)
		return
	}
	kind := r.FormValue("kind")
	builder := r.FormValue("builder")
	benchmark := r.FormValue("benchmark")
	if kind == "" {
		kind = "benchmark"
	}
	if kind != "benchmark" && kind != "builder" {
		logErr(w, r, fmt.Errorf("unknown kind %s", kind))
		return
	}
	// Fetch the new commit.
	com1 := new(Commit)
	com1.Hash = r.FormValue("commit")
	if hash, ok := knownTags[com1.Hash]; ok {
		// Allow release tags (e.g. "go1.3") in place of hashes.
		com1.Hash = hash
	}
	if err := datastore.Get(c, com1.Key(c), com1); err != nil {
		logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com1.Hash, err))
		return
	}
	// Fetch the associated perf result.
	ress1 := &PerfResult{CommitHash: com1.Hash}
	if err := datastore.Get(c, ress1.Key(c), ress1); err != nil {
		logErr(w, r, fmt.Errorf("failed to fetch perf result %s: %v", com1.Hash, err))
		return
	}
	// Fetch the old commit.
	var ress0 *PerfResult
	com0 := new(Commit)
	com0.Hash = r.FormValue("commit0")
	if hash, ok := knownTags[com0.Hash]; ok {
		com0.Hash = hash
	}
	if com0.Hash != "" {
		// Have an exact commit hash, fetch directly.
		if err := datastore.Get(c, com0.Key(c), com0); err != nil {
			logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com0.Hash, err))
			return
		}
		ress0 = &PerfResult{CommitHash: com0.Hash}
		if err := datastore.Get(c, ress0.Key(c), ress0); err != nil {
			logErr(w, r, fmt.Errorf("failed to fetch perf result for %s: %v", com0.Hash, err))
			return
		}
	} else {
		// Don't have the commit hash, find the previous commit to compare.
		rc := MakePerfResultCache(c, com1, false)
		ress0, err = rc.NextForComparison(com1.Num, "")
		if err != nil {
			logErr(w, r, err)
			return
		}
		if ress0 == nil {
			logErr(w, r, fmt.Errorf("no previous commit with results"))
			return
		}
		// Now that we know the right result, fetch the commit.
		com0.Hash = ress0.CommitHash
		if err := datastore.Get(c, com0.Key(c), com0); err != nil {
			logErr(w, r, fmt.Errorf("failed to fetch commit %s: %v", com0.Hash, err))
			return
		}
	}
	res0 := ress0.ParseData()
	res1 := ress1.ParseData()
	var benchmarks []*uiPerfDetailBenchmark
	var list []string
	if kind == "builder" {
		list = pc.BenchmarksForBuilder(builder)
	} else {
		list = pc.BuildersForBenchmark(benchmark)
	}
	for _, other := range list {
		// "other" is the varying dimension: a benchmark when kind=builder,
		// a builder when kind=benchmark.
		if kind == "builder" {
			benchmark = other
		} else {
			builder = other
		}
		var procs []*uiPerfDetailProcs
		allProcs := pc.ProcList(builder)
		for _, p := range allProcs {
			BenchProcs := fmt.Sprintf("%v-%v", benchmark, p)
			if res0[builder] == nil || res0[builder][BenchProcs] == nil {
				continue
			}
			pp := &uiPerfDetailProcs{Procs: p}
			for metric, val := range res0[builder][BenchProcs].Metrics {
				var pm uiPerfDetailMetric
				pm.Name = metric
				pm.Val0 = fmt.Sprintf("%v", val)
				val1 := uint64(0)
				if res1[builder] != nil && res1[builder][BenchProcs] != nil {
					val1 = res1[builder][BenchProcs].Metrics[metric]
				}
				pm.Val1 = fmt.Sprintf("%v", val1)
				v0 := val
				v1 := val1
				valf := perfDiff(v0, v1)
				pm.Delta = fmt.Sprintf("%+.2f%%", valf)
				pm.Style = perfChangeStyle(pc, valf, builder, BenchProcs, pm.Name)
				pp.Metrics = append(pp.Metrics, pm)
			}
			sort.Sort(pp.Metrics)
			// Artifact rows (logs, profiles) are appended after the sorted metrics.
			for artifact, hash := range res0[builder][BenchProcs].Artifacts {
				var pm uiPerfDetailMetric
				pm.Val0 = fmt.Sprintf("%v", artifact)
				pm.Link0 = fmt.Sprintf("log/%v", hash)
				pm.Val1 = fmt.Sprintf("%v", artifact)
				if res1[builder] != nil && res1[builder][BenchProcs] != nil && res1[builder][BenchProcs].Artifacts[artifact] != "" {
					pm.Link1 = fmt.Sprintf("log/%v", res1[builder][BenchProcs].Artifacts[artifact])
				}
				pp.Metrics = append(pp.Metrics, pm)
			}
			procs = append(procs, pp)
		}
		benchmarks = append(benchmarks, &uiPerfDetailBenchmark{other, procs})
	}
	cfg := new(uiPerfConfig)
	for _, v := range pc.BuildersForBenchmark("") {
		cfg.Builders = append(cfg.Builders, uiPerfConfigElem{v, v == builder})
	}
	for _, v := range pc.BenchmarksForBuilder("") {
		cfg.Benchmarks = append(cfg.Benchmarks, uiPerfConfigElem{v, v == benchmark})
	}
	data := &uiPerfDetailTemplateData{d, cfg, kind == "builder", com0, com1, benchmarks}
	var buf bytes.Buffer
	if err := uiPerfDetailTemplate.Execute(&buf, data); err != nil {
		logErr(w, r, err)
		return
	}
	buf.WriteTo(w)
}
// perfResultSplit splits a perf result key of the form
// "builder|benchmark-procs" (e.g. "linux-amd64|json-4") into its parts.
// Malformed input yields zero values for the missing components instead
// of panicking with an index-out-of-range; a non-numeric procs component
// yields procs == 0.
func perfResultSplit(s string) (builder string, benchmark string, procs int) {
	s1 := strings.Split(s, "|")
	builder = s1[0]
	if len(s1) < 2 {
		return
	}
	s2 := strings.Split(s1[1], "-")
	benchmark = s2[0]
	if len(s2) < 2 {
		return
	}
	procs, _ = strconv.Atoi(s2[1])
	return
}
// uiPerfDetailTemplateData is the root template data for perf_detail.html.
type uiPerfDetailTemplateData struct {
	Dashboard   *Dashboard
	Config      *uiPerfConfig
	KindBuilder bool // true when iterating the benchmarks of one builder
	Commit0     *Commit
	Commit1     *Commit
	Benchmarks  []*uiPerfDetailBenchmark
}

// uiPerfDetailBenchmark is one benchmark (or builder) section of the page.
type uiPerfDetailBenchmark struct {
	Name  string
	Procs []*uiPerfDetailProcs
}

// uiPerfDetailProcs is the metric table for one GOMAXPROCS value.
type uiPerfDetailProcs struct {
	Procs   int
	Metrics uiPerfDetailMetrics
}

// uiPerfDetailMetric is one old/new/delta row of the table.
type uiPerfDetailMetric struct {
	Name  string
	Val0  string
	Val1  string
	Link0 string
	Link1 string
	Delta string
	Style string
}

// uiPerfDetailMetrics sorts metric rows alphabetically by name.
type uiPerfDetailMetrics []uiPerfDetailMetric

func (l uiPerfDetailMetrics) Len() int           { return len(l) }
func (l uiPerfDetailMetrics) Swap(i, j int)      { l[i], l[j] = l[j], l[i] }
func (l uiPerfDetailMetrics) Less(i, j int) bool { return l[i].Name < l[j].Name }

// uiPerfDetailTemplate renders the perf detail page (perf_detail.html).
var uiPerfDetailTemplate = template.Must(
	template.New("perf_detail.html").Funcs(tmplFuncs).ParseFiles(templateFile("perf_detail.html")),
)

Просмотреть файл

@ -1,101 +0,0 @@
<!doctype html>
{{/* perf_detail.html: old-vs-new metric comparison. The radio buttons toggle
     between builder mode and benchmark mode via the small scripts below;
     each benchmark renders one table per GOMAXPROCS value. Template
     comments like this one are stripped at render time. */}}
<html>
<head>
<title>{{$.Dashboard.Name}} Dashboard</title>
<link rel="stylesheet" href="/static/style.css"/>
<script type="text/javascript">
function kindBuilder() {
document.getElementById('checkBuilder').checked = true;
document.getElementById('controlBuilder').style.display='inline';
document.getElementById('controlBenchmark').style.display='none';
}
function kindBenchmark() {
document.getElementById('checkBenchmark').checked = true;
document.getElementById('controlBenchmark').style.display='inline';
document.getElementById('controlBuilder').style.display='none';
}
window.onload = {{if $.KindBuilder}} kindBuilder {{else}} kindBenchmark {{end}};
</script>
</head>
<body>
<header id="topbar">
<h1>Go Dashboard</h1>
<nav>
<a href="{{$.Dashboard.Prefix}}/">Test</a>
<a href="{{$.Dashboard.Prefix}}/perf">Perf</a>
<a href="{{$.Dashboard.Prefix}}/perfgraph">Graphs</a>
</nav>
<div class="clear"></div>
</header>
<div class="page">
<div class="diff-container">
<div class="diff-meta">
<form>
<div><b>New: </b><input type="edit" name="commit" value="{{$.Commit1.Hash}}" /> {{shortUser $.Commit1.User}} {{$.Commit1.Time.Format "Mon 02 Jan 15:04"}} {{shortDesc $.Commit1.Desc}} </div>
<div><b>Old: </b><input type="edit" name="commit0" value="{{$.Commit0.Hash}}" /> {{shortUser $.Commit0.User}} {{$.Commit0.Time.Format "Mon 02 Jan 15:04"}} {{shortDesc $.Commit0.Desc}} </div>
<div>
<input id="checkBuilder" type="radio" name="kind" value="builder" required onclick="kindBuilder()">builder</input>
<input id="checkBenchmark" type="radio" name="kind" value="benchmark" required onclick="kindBenchmark()">benchmark</input>
<select id="controlBuilder" name="builder">
{{range $.Config.Builders}}
<option {{if .Selected}}selected{{end}}>{{.Name}}</option>
{{end}}
</select>
<select id="controlBenchmark" name="benchmark">
{{range $.Config.Benchmarks}}
<option {{if .Selected}}selected{{end}}>{{.Name}}</option>
{{end}}
</select>
<input type="submit" value="Refresh" />
<a href="https://golang.org/wiki/PerfDashboard">Help</a>
</div>
</form>
</div>
<p></p>
{{range $b := $.Benchmarks}}
<div class="diff-benchmark">
<h2>{{$b.Name}}</h2>
{{range $p := $b.Procs}}
<div class="diff">
<h1>GOMAXPROCS={{$p.Procs}}</h1>
<table>
<thead>
<tr>
<th>Metric</th>
<th>old</th>
<th>new</th>
<th>delta</th>
</tr>
</thead>
<tbody>
{{range $m := $p.Metrics}}
<tr>
<td class="metric">{{$m.Name}}</td>
{{if $m.Link0}}
<td><a href="{{$.Dashboard.Prefix}}/{{$m.Link0}}">{{$m.Val0}}</td>
{{else}}
<td>{{$m.Val0}}</td>
{{end}}
{{if $m.Link1}}
<td><a href="{{$.Dashboard.Prefix}}/{{$m.Link1}}">{{$m.Val1}}</td>
{{else}}
<td>{{$m.Val1}}</td>
{{end}}
<td class="result"><span class="{{$m.Style}}">{{$m.Delta}}</span></td>
</tr>
{{end}}
</tbody>
</table>
</div>
{{end}}
</div>
{{end}}
<div class="clear"></div>
</div>
<div class="clear"></div>
</div>
</body>
</html>

Просмотреть файл

@ -1,262 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"fmt"
"html/template"
"net/http"
"strconv"
"google.golang.org/appengine"
"google.golang.org/appengine/datastore"
)
// perfGraphHandler draws interactive graphs of selected metrics over a
// commit range. Series are selected with repeated builder/benchmark/
// metric/procs form values; each series is normalized to its first
// non-zero data point.
func perfGraphHandler(w http.ResponseWriter, r *http.Request) {
	d := goDash
	c := d.Context(appengine.NewContext(r))
	pc, err := GetPerfConfig(c, r)
	if err != nil {
		logErr(w, r, err)
		return
	}
	allBuilders := pc.BuildersForBenchmark("")
	allBenchmarks := pc.BenchmarksForBuilder("")
	allMetrics := pc.MetricsForBenchmark("")
	allProcs := pc.ProcList("")
	r.ParseForm()
	selBuilders := r.Form["builder"]
	selBenchmarks := r.Form["benchmark"]
	selMetrics := r.Form["metric"]
	selProcs := r.Form["procs"]
	// Apply defaults when nothing is selected.
	// NOTE(review): panics if the config has no builders (allBuilders empty).
	if len(selBuilders) == 0 {
		selBuilders = append(selBuilders, allBuilders[0])
	}
	if len(selBenchmarks) == 0 {
		selBenchmarks = append(selBenchmarks, "json")
	}
	if len(selMetrics) == 0 {
		selMetrics = append(selMetrics, "time")
	}
	if len(selProcs) == 0 {
		selProcs = append(selProcs, "1")
	}
	commitFrom := r.FormValue("commit-from")
	if commitFrom == "" {
		commitFrom = lastRelease
	}
	commitTo := r.FormValue("commit-to")
	if commitTo == "" {
		commitTo = "tip"
	}
	// TODO(dvyukov): validate input
	// Figure out start and end commit from commitFrom/commitTo.
	startCommitNum := 0
	endCommitNum := 0
	{
		comFrom := &Commit{Hash: knownTags[commitFrom]}
		if err := datastore.Get(c, comFrom.Key(c), comFrom); err != nil {
			logErr(w, r, err)
			return
		}
		startCommitNum = comFrom.Num
	retry:
		if commitTo == "tip" {
			p, err := GetPackage(c, "")
			if err != nil {
				logErr(w, r, err)
				return
			}
			endCommitNum = p.NextNum
		} else {
			comTo := &Commit{Hash: knownTags[commitTo]}
			if err := datastore.Get(c, comTo.Key(c), comTo); err != nil {
				logErr(w, r, err)
				return
			}
			endCommitNum = comTo.Num + 1
		}
		if endCommitNum <= startCommitNum {
			// User probably selected from:go1.3 to:go1.2. Fix go1.2 to tip.
			if commitTo == "tip" {
				logErr(w, r, fmt.Errorf("no commits to display (%v-%v)", commitFrom, commitTo))
				return
			}
			commitTo = "tip"
			goto retry
		}
	}
	commitsToDisplay := endCommitNum - startCommitNum
	// present reports whether s is an element of set.
	present := func(set []string, s string) bool {
		for _, s1 := range set {
			if s1 == s {
				return true
			}
		}
		return false
	}
	cfg := &uiPerfConfig{}
	for _, v := range allBuilders {
		cfg.Builders = append(cfg.Builders, uiPerfConfigElem{v, present(selBuilders, v)})
	}
	for _, v := range allBenchmarks {
		cfg.Benchmarks = append(cfg.Benchmarks, uiPerfConfigElem{v, present(selBenchmarks, v)})
	}
	for _, v := range allMetrics {
		cfg.Metrics = append(cfg.Metrics, uiPerfConfigElem{v, present(selMetrics, v)})
	}
	for _, v := range allProcs {
		cfg.Procs = append(cfg.Procs, uiPerfConfigElem{strconv.Itoa(v), present(selProcs, strconv.Itoa(v))})
	}
	for k := range knownTags {
		cfg.CommitsFrom = append(cfg.CommitsFrom, uiPerfConfigElem{k, commitFrom == k})
	}
	for k := range knownTags {
		cfg.CommitsTo = append(cfg.CommitsTo, uiPerfConfigElem{k, commitTo == k})
	}
	cfg.CommitsTo = append(cfg.CommitsTo, uiPerfConfigElem{"tip", commitTo == "tip"})
	// One entry per selected series in each of these, indexed in parallel.
	var vals [][]float64
	var hints [][]string
	var annotations [][]string
	var certainty [][]bool
	var headers []string
	commits2, err := GetCommits(c, startCommitNum, commitsToDisplay)
	if err != nil {
		logErr(w, r, err)
		return
	}
	for _, builder := range selBuilders {
		for _, metric := range selMetrics {
			for _, benchmark := range selBenchmarks {
				for _, procs := range selProcs {
					benchProcs := fmt.Sprintf("%v-%v", benchmark, procs)
					vv, err := GetPerfMetricsForCommits(c, builder, benchProcs, metric, startCommitNum, commitsToDisplay)
					if err != nil {
						logErr(w, r, err)
						return
					}
					hasdata := false
					for _, v := range vv {
						if v != 0 {
							hasdata = true
						}
					}
					if hasdata {
						noise := pc.NoiseLevel(builder, benchProcs, metric)
						// Build a compact series label: drop dimensions
						// that are the same for every selected series.
						descBuilder := "/" + builder
						descBenchmark := "/" + benchProcs
						descMetric := "/" + metric
						if len(selBuilders) == 1 {
							descBuilder = ""
						}
						if len(selBenchmarks) == 1 && len(selProcs) == 1 {
							descBenchmark = ""
						}
						if len(selMetrics) == 1 && (len(selBuilders) > 1 || len(selBenchmarks) > 1 || len(selProcs) > 1) {
							descMetric = ""
						}
						desc := fmt.Sprintf("%v%v%v", descBuilder, descBenchmark, descMetric)[1:]
						hh := make([]string, commitsToDisplay)
						ann := make([]string, commitsToDisplay)
						valf := make([]float64, commitsToDisplay)
						cert := make([]bool, commitsToDisplay)
						firstval := uint64(0)
						lastval := uint64(0)
						for i, v := range vv {
							cert[i] = true
							if v == 0 {
								// Missing value: carry the previous value
								// forward and mark the point uncertain.
								if lastval == 0 {
									continue
								}
								cert[i] = false
								v = lastval
							}
							if firstval == 0 {
								firstval = v
							}
							// Normalize to the first observed value.
							valf[i] = float64(v) / float64(firstval)
							if cert[i] {
								d := ""
								if lastval != 0 {
									diff := perfDiff(lastval, v)
									d = fmt.Sprintf(" (%+.02f%%)", diff)
									if !isNoise(diff, noise) {
										ann[i] = fmt.Sprintf("%+.02f%%", diff)
									}
								}
								hh[i] = fmt.Sprintf("%v%v", v, d)
							} else {
								hh[i] = "NO DATA"
							}
							lastval = v
						}
						vals = append(vals, valf)
						hints = append(hints, hh)
						annotations = append(annotations, ann)
						certainty = append(certainty, cert)
						headers = append(headers, desc)
					}
				}
			}
		}
	}
	// Restrict the x axis to commits that were actually benchmarked.
	var commits []perfGraphCommit
	if len(vals) != 0 && len(vals[0]) != 0 {
		idx := 0
		for i := range vals[0] {
			com := commits2[i]
			if com == nil || !com.NeedsBenchmarking {
				continue
			}
			c := perfGraphCommit{Id: idx, Name: fmt.Sprintf("%v (%v)", com.Desc, com.Time.Format("Jan 2, 2006 1:04"))}
			idx++
			for j := range vals {
				c.Vals = append(c.Vals, perfGraphValue{float64(vals[j][i]), certainty[j][i], hints[j][i], annotations[j][i]})
			}
			commits = append(commits, c)
		}
	}
	data := &perfGraphData{d, cfg, headers, commits}
	var buf bytes.Buffer
	if err := perfGraphTemplate.Execute(&buf, data); err != nil {
		logErr(w, r, err)
		return
	}
	buf.WriteTo(w)
}
// perfGraphTemplate renders the perf graph page (perf_graph.html).
var perfGraphTemplate = template.Must(
	template.New("perf_graph.html").ParseFiles(templateFile("perf_graph.html")),
)

// perfGraphData is the root template data for perf_graph.html.
type perfGraphData struct {
	Dashboard *Dashboard
	Config    *uiPerfConfig
	Headers   []string
	Commits   []perfGraphCommit
}

// perfGraphCommit is one point on the x axis with one value per series.
type perfGraphCommit struct {
	Id   int
	Name string
	Vals []perfGraphValue
}

// perfGraphValue is a single data point of one series.
type perfGraphValue struct {
	Val       float64
	Certainty bool // false when the value was carried over from a previous point
	Hint      string
	Ann       string
}

Просмотреть файл

@ -1,120 +0,0 @@
<!doctype html>
{{/* perf_graph.html: renders the selected series as a Google Charts
     LineChart, with checkbox panels for builders/benchmarks/procs/metrics
     and a commit-range selector. Template comments like this one are
     stripped at render time. */}}
<html>
<head>
<title>{{$.Dashboard.Name}} Dashboard</title>
<link rel="stylesheet" href="/static/style.css"/>
<style>
.graph-container { background: #eee; }
</style>
<script type="text/javascript" src="https://www.google.com/jsapi"></script>
<script type="text/javascript">
google.load("visualization", "1", {packages:["corechart"]});
google.setOnLoadCallback(drawCharts);
function drawCharts() {
var data = new google.visualization.DataTable();
data.addColumn({type: 'number', label: 'Commit'});
data.addColumn({type: 'number'});
data.addColumn({type: 'string', role: 'tooltip'});
{{range $.Headers}}
data.addColumn({type: 'number', label: '{{.}}'});
data.addColumn({type: 'boolean', role: 'certainty'});
data.addColumn({type: 'string', role: 'tooltip'});
data.addColumn({type: 'string', role: 'annotation'});
{{end}}
data.addRows([
{{range $.Commits}}
[ {{.Id}}, 1, "{{.Name}}",
{{range .Vals}}
{{if .Val}}
{{.Val}}, {{.Certainty}}, '{{.Hint}}', '{{.Ann}}',
{{else}}
,,,,
{{end}}
{{end}}
],
{{end}}
]);
new google.visualization.LineChart(document.getElementById('graph_div')).
draw(data, {
width: "100%",
height: 700,
legend: {position: "bottom"},
focusTarget: "category",
hAxis: {textPosition: "none"},
chartArea: {left: "10%", top: "5%", width: "85%", height:"80%"},
explorer: {axis: 'horizontal', maxZoomIn: 0, maxZoomOut: 1, zoomDelta: 1.2, keepInBounds: true}
})
}
</script>
</head>
<body>
<header id="topbar">
<h1>Go Dashboard</h1>
<nav>
<a href="{{$.Dashboard.Prefix}}/">Test</a>
<a href="{{$.Dashboard.Prefix}}/perf">Perf</a>
<a href="{{$.Dashboard.Prefix}}/perfgraph">Graphs</a>
</nav>
<div class="clear"></div>
</header>
<div class="page">
<div id="graph_div" class="main-content graph-container">
</div>
<aside>
<form>
<div class="panel">
<h1>Builders</h1>
{{range $.Config.Builders}}
<input type="checkbox" name="builder" value="{{.Name}}" {{if .Selected}}checked{{end}}>{{.Name}}</input><br>
{{end}}
</div>
<div class="panel">
<h1>Benchmarks</h1>
{{range $.Config.Benchmarks}}
<input type="checkbox" name="benchmark" value="{{.Name}}" {{if .Selected}}checked{{end}}>{{.Name}}</input><br>
{{end}}
</div>
<div class="panel">
<h1>Procs</h1>
{{range $.Config.Procs}}
<input type="checkbox" name="procs" value="{{.Name}}" {{if .Selected}}checked{{end}}>{{.Name}}</input><br>
{{end}}
</div>
<div class="panel">
<h1>Metrics</h1>
{{range $.Config.Metrics}}
<input type="checkbox" name="metric" value="{{.Name}}" {{if .Selected}}checked{{end}}>{{.Name}}</input><br>
{{end}}
</div>
<div class="panel">
<h1>Commits</h1>
<b>From:</b>
<select required name="commit-from">
{{range $.Config.CommitsFrom}}
<option {{if .Selected}}selected{{end}}>{{.Name}}</option>
{{end}}
</select>
<b>To:</b>
<select required name="commit-to">
{{range $.Config.CommitsTo}}
<option {{if .Selected}}selected{{end}}>{{.Name}}</option>
{{end}}
</select>
</div>
<input class="button" type="submit" value="Refresh" name="refresh"/>
<a href="https://golang.org/wiki/PerfDashboard">Help</a>
</form>
</aside>
<div class="clear"></div>
</div>
</body>
</html>
Просмотреть файл

@ -1,181 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bytes"
"context"
"fmt"
"html/template"
"net/http"
"sort"
"google.golang.org/appengine"
"google.golang.org/appengine/datastore"
)
// Thresholds used by perfLearnHandler to derive per-metric noise levels
// from historical commit-to-commit variation (diffs are percentages).
const (
	learnPercentile       = 0.95 // percentile of observed diffs used as the noise base
	learnSignalMultiplier = 1.1  // signal threshold = base value * this multiplier
	learnMinSignal        = 0.5  // lower bound on the signal threshold, in percent
)
// perfLearnHandler computes per-metric noise levels from the historical
// commit-to-commit variation of every builder/benchmark/procs/metric
// combination and renders them as graphs. With ?update=... it also stores
// the learned levels into the PerfConfig entity.
func perfLearnHandler(w http.ResponseWriter, r *http.Request) {
	d := goDash
	c := d.Context(appengine.NewContext(r))
	pc, err := GetPerfConfig(c, r)
	if err != nil {
		logErr(w, r, err)
		return
	}
	p, err := GetPackage(c, "")
	if err != nil {
		logErr(w, r, err)
		return
	}
	update := r.FormValue("update") != ""
	noise := make(map[string]string)
	data := &perfLearnData{}
	commits, err := GetCommits(c, 0, p.NextNum)
	if err != nil {
		logErr(w, r, err)
		return
	}
	for _, builder := range pc.BuildersForBenchmark("") {
		for _, benchmark := range pc.BenchmarksForBuilder(builder) {
			for _, metric := range pc.MetricsForBenchmark(benchmark) {
				for _, procs := range pc.ProcList(builder) {
					values, err := GetPerfMetricsForCommits(c, builder, fmt.Sprintf("%v-%v", benchmark, procs), metric, 0, p.NextNum)
					if err != nil {
						logErr(w, r, err)
						return
					}
					// Collect percentage diffs between consecutive
					// benchmarked commits.
					var dd []float64
					last := uint64(0)
					for i, v := range values {
						if v == 0 {
							// Break the chain only if this commit was meant
							// to be benchmarked (or is unknown).
							if com := commits[i]; com == nil || com.NeedsBenchmarking {
								last = 0
							}
							continue
						}
						if last != 0 {
							// Always divide the larger value by the smaller,
							// so diff is a non-negative percentage.
							v1 := v
							if v1 < last {
								v1, last = last, v1
							}
							diff := float64(v1)/float64(last)*100 - 100
							dd = append(dd, diff)
						}
						last = v
					}
					if len(dd) == 0 {
						continue
					}
					sort.Float64s(dd)
					// Noise base is the learnPercentile-th percentile; the
					// signal threshold is a multiple of it, bounded below.
					baseIdx := int(float64(len(dd)) * learnPercentile)
					baseVal := dd[baseIdx]
					signalVal := baseVal * learnSignalMultiplier
					if signalVal < learnMinSignal {
						signalVal = learnMinSignal
					}
					signalIdx := -1
					noiseNum := 0
					signalNum := 0
					var diffs []*perfLearnDiff
					for i, d := range dd {
						// Clamp outliers so the graph stays readable.
						if d > 3*signalVal {
							d = 3 * signalVal
						}
						diffs = append(diffs, &perfLearnDiff{Num: i, Val: d})
						if signalIdx == -1 && d >= signalVal {
							signalIdx = i
						}
						if d < signalVal {
							noiseNum++
						} else {
							signalNum++
						}
					}
					diffs[baseIdx].Hint = "95%"
					if signalIdx != -1 {
						diffs[signalIdx].Hint = "signal"
					}
					// Plot only the top 20% of diffs.
					diffs = diffs[len(diffs)*4/5:]
					name := fmt.Sprintf("%v/%v-%v/%v", builder, benchmark, procs, metric)
					data.Entries = append(data.Entries, &perfLearnEntry{len(data.Entries), name, baseVal, noiseNum, signalVal, signalNum, diffs})
					// Require a reasonable sample size before trusting the
					// learned level, unless ?force=... is given.
					if len(dd) >= 100 || r.FormValue("force") != "" {
						nname := fmt.Sprintf("%v|%v-%v", builder, benchmark, procs)
						n := noise[nname] + fmt.Sprintf("|%v=%.2f", metric, signalVal)
						noise[nname] = n
					}
				}
			}
		}
	}
	if update {
		// Persist the learned noise levels transactionally.
		var noiseLevels []string
		for k, v := range noise {
			noiseLevels = append(noiseLevels, k+v)
		}
		tx := func(c context.Context) error {
			pc, err := GetPerfConfig(c, r)
			if err != nil {
				return err
			}
			pc.NoiseLevels = noiseLevels
			if _, err := datastore.Put(c, PerfConfigKey(c), pc); err != nil {
				return fmt.Errorf("putting PerfConfig: %v", err)
			}
			return nil
		}
		if err := datastore.RunInTransaction(c, tx, nil); err != nil {
			logErr(w, r, err)
			return
		}
	}
	var buf bytes.Buffer
	if err := perfLearnTemplate.Execute(&buf, data); err != nil {
		logErr(w, r, err)
		return
	}
	buf.WriteTo(w)
}
// perfLearnTemplate renders the perf-learn page (perf_learn.html),
// which charts the per-benchmark diff distributions collected by the
// handler above so noise/signal thresholds can be inspected.
var perfLearnTemplate = template.Must(
	template.New("perf_learn.html").
		Funcs(tmplFuncs).
		ParseFiles(templateFile("perf_learn.html")),
)
// perfLearnData is the root template context for perf_learn.html:
// one perfLearnEntry per builder/benchmark-procs/metric combination.
type perfLearnData struct {
	Entries []*perfLearnEntry
}
// perfLearnEntry summarizes the distribution of percentage diffs
// between successive benchmark results for one
// builder/benchmark-procs/metric combination.
type perfLearnEntry struct {
	Num       int     // index of this entry within perfLearnData.Entries
	Name      string  // "builder/benchmark-procs/metric"
	BaseVal   float64 // diff at the learnPercentile position of the sorted diffs
	NoiseNum  int     // count of diffs below SignalVal (considered noise)
	SignalVal float64 // BaseVal*learnSignalMultiplier, floored at learnMinSignal
	SignalNum int     // count of diffs at or above SignalVal (considered signal)
	Diffs     []*perfLearnDiff
}
// perfLearnDiff is a single chart data point: one percentage diff
// between successive benchmark results.
type perfLearnDiff struct {
	Num  int     // position within the sorted diff slice
	Val  float64 // percentage diff; capped at 3*SignalVal before charting
	Hint string  // chart annotation: "95%" at the base index, "signal" at the threshold
}

Просмотреть файл

@ -1,45 +0,0 @@
<!doctype html>
{{/* perf_learn.html: for each perfLearnEntry, draws a Google Charts
     line chart of its sorted percentage diffs together with two
     constant reference lines (the 95% base value and the signal
     threshold). Executed with perfLearnData as the root context. */}}
<html>
<head>
<script type="text/javascript" src="https://www.google.com/jsapi"></script>
<script type="text/javascript">
google.load("visualization", "1", {packages:["corechart"]});
google.setOnLoadCallback(drawCharts);
function drawCharts() {
{
{{range $ent := $.Entries}}
// Columns: x index, base (95%) line, signal line, and the actual diff
// with its optional "95%"/"signal" annotation.
var data = new google.visualization.DataTable();
data.addColumn('number', 'idx');
data.addColumn('number', '95%');
data.addColumn({type: 'boolean', role: 'certainty'});
data.addColumn('number', 'signal');
data.addColumn({type: 'boolean', role: 'certainty'});
data.addColumn('number', 'diff');
data.addColumn({type: 'string', role: 'annotation'});
data.addRows([
{{range .Diffs}} [{{.Num}}, {{$ent.BaseVal}}, false, {{$ent.SignalVal}}, false, {{.Val}}, '{{.Hint}}'], {{end}}
]);
// Render into the per-entry <div id="graphN"> declared in the body.
new google.visualization.LineChart(document.getElementById('graph{{.Num}}')).
draw(data, {
width: 600,
height: 200,
legend: {position: "none"},
vAxis: {minValue: 0},
chartArea: {left: "10%", top: "1%", width: "90%", height:"95%"}
}
)
{{end}}
}
}
</script>
</head>
<body>
{{/* One summary line plus chart container per entry; counts in
     brackets are NoiseNum and SignalNum. */}}
{{range $.Entries}}
<p>
{{.Name}}: base={{printf "%.2f[%d]" .BaseVal .NoiseNum}} signal={{printf "%.2f[%d]" .SignalVal .SignalNum}}
<div id="graph{{.Num}}" width="100px" height="100px"> </div>
</p>
{{end}}
</body>
</html>

Просмотреть файл

@ -1,13 +0,0 @@
{{if .Commit}}Change {{shortHash .Commit.Hash}} caused perf changes on {{.Builder}}:
{{.Commit.Desc}}
http://code.google.com/p/go/source/detail?r={{shortHash .Commit.Hash}}
{{else}}This change caused perf changes on {{.Builder}}:
{{end}}
{{range $b := .Benchmarks}}
{{printf "%-16s %12s %12s %10s" $b.Name "old" "new" "delta"}}
{{range $m := $b.Metrics}}{{printf "%-16s %12v %12v %+10.2f" $m.Name $m.Old $m.New $m.Delta}}
{{end}}{{end}}
{{.Url}}

Просмотреть файл

@ -30,10 +30,6 @@ var testEntityKinds = []string{
"Commit",
"CommitRun",
"Result",
"PerfResult",
"PerfMetricRun",
"PerfConfig",
"PerfTodo",
"Log",
}
@ -147,14 +143,6 @@ var testRequests = []struct {
{"/result", nil, &Result{PackagePath: testPkg, Builder: "linux-amd64", Hash: "1003", GoHash: "0005", OK: false}, nil},
// now we must get benchmark todo
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0003", PerfResults: []string{}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "http", Hash: "0003", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0003", PerfResults: []string{"http"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "json", Hash: "0003", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0003", PerfResults: []string{"http", "json"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0003", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0001", PerfResults: []string{}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "http", Hash: "0001", OK: true}, nil},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0001", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, nil},
// create new commit, it must appear in todo
{"/commit", nil, tCommit("0006", "0005", "", true), nil},
@ -166,8 +154,6 @@ var testRequests = []struct {
{"/result", nil, &Result{PackagePath: testPkg, Builder: "linux-amd64", Hash: "1001", GoHash: "0006", OK: false}, nil},
// now we must get benchmark todo
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0006", PerfResults: []string{}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "http", Hash: "0006", OK: true}, nil},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0006", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, nil},
// create new benchmark, all commits must re-appear in todo
{"/commit", nil, tCommit("0007", "0006", "", true), nil},
@ -179,16 +165,6 @@ var testRequests = []struct {
{"/result", nil, &Result{PackagePath: testPkg, Builder: "linux-amd64", Hash: "1001", GoHash: "0007", OK: false}, nil},
// now we must get benchmark todo
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0007", PerfResults: []string{}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "bson", Hash: "0007", OK: true}, nil},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0007", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0007", PerfResults: []string{"bson"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0007", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0006", PerfResults: []string{"http"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0006", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0001", PerfResults: []string{"http"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0001", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0003", PerfResults: []string{"http", "json"}}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-amd64", Benchmark: "meta-done", Hash: "0003", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-amd64"}}, nil, nil},
// attach second builder
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, &Todo{Kind: "build-go-commit", Data: &Commit{Hash: "0007"}}},
@ -200,13 +176,7 @@ var testRequests = []struct {
{"/result", nil, &Result{PackagePath: testPkg, Builder: "linux-386", Hash: "1001", GoHash: "0007", OK: false}, nil},
// now we must get benchmark todo
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0007"}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-386", Benchmark: "meta-done", Hash: "0007", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0006"}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-386", Benchmark: "meta-done", Hash: "0006", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0001"}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-386", Benchmark: "meta-done", Hash: "0001", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, &Todo{Kind: "benchmark-go-commit", Data: &Commit{Hash: "0003"}}},
{"/perf-result", nil, &PerfRequest{Builder: "linux-386", Benchmark: "meta-done", Hash: "0003", OK: true}, nil},
{"/todo", url.Values{"kind": {"build-go-commit", "benchmark-go-commit"}, "builder": {"linux-386"}}, nil, nil},
}

Просмотреть файл

@ -60,18 +60,6 @@ func updateBenchmark(w http.ResponseWriter, r *http.Request) {
}
ncommit++
// create PerfResult
res := &PerfResult{CommitHash: com.Hash, CommitNum: com.Num}
err = datastore.Get(c, res.Key(c), res)
if err != nil && err != datastore.ErrNoSuchEntity {
return fmt.Errorf("fetching PerfResult: %v", err)
}
if err == datastore.ErrNoSuchEntity {
if _, err := datastore.Put(c, res.Key(c), res); err != nil {
return fmt.Errorf("putting PerfResult: %v", err)
}
}
// Update CommitRun.
if cr != nil && cr.StartCommitNum != com.Num/PerfRunLength*PerfRunLength {
if _, err := datastore.Put(c, cr.Key(c), cr); err != nil {