Bug 386671 - "Add csv export to graph server" [p=alice r=vlad]

This commit is contained in:
reed%reedloden.com 2007-07-17 21:57:51 +00:00
Родитель 7edb67d89f
Коммит be46ce806c
10 изменённых файлов: 177 добавлений и 4 удалений

Просмотреть файл

@ -66,9 +66,11 @@ try:
db.execute("CREATE TABLE dataset_extra_data (dataset_id INTEGER, time INTEGER, data BLOB);");
db.execute("CREATE TABLE annotations (dataset_id INTEGER, time INTEGER, value STRING);")
db.execute("CREATE INDEX datasets_id_idx ON dataset_values(dataset_id);")
db.execute("CREATE INDEX datasets_branchinfo_id_idx ON dataset_branchinfo(dataset_id);")
db.execute("CREATE INDEX datasets_extradata_id_idx ON dataset_extra_data(dataset_id);")
db.execute("CREATE INDEX datasets_time_idx ON dataset_values(time);")
db.execute("CREATE INDEX datasets_time_id_idx ON dataset_values(dataset_id, time);")
db.execute("CREATE INDEX datasets_info_idx on dataset_info(type, machine, test, test_type, extra_data, branch, date);")
db.execute("CREATE INDEX datasets_extra_data_supplemental_idx ON dataset_extra_data(dataset_id, time, data);")
db.commit()
except:
pass
@ -88,6 +90,9 @@ if form.has_key("filename"):
for line in val.file:
line = line.rstrip("\n\r")
contents = line.split(',')
#clear any previous content in the fields variables - stops reuse of data over lines
for field in fields:
globals()[field] = ''
if len(contents) < 7:
print "Incompatable file format"
sys.exit(500)

Просмотреть файл

@ -96,7 +96,12 @@ try:
db.execute("CREATE TABLE dataset_extra_data (dataset_id INTEGER, time INTEGER, data BLOB);");
db.execute("CREATE TABLE annotations (dataset_id INTEGER, time INTEGER, value STRING);")
db.execute("CREATE INDEX datasets_id_idx ON dataset_values(dataset_id);")
db.execute("CREATE INDEX datasets_branchinfo_id_idx ON dataset_branchinfo(dataset_id);")
db.execute("CREATE INDEX datasets_extradata_id_idx ON dataset_extra_data(dataset_id);")
db.execute("CREATE INDEX datasets_time_idx ON dataset_values(time);")
db.execute("CREATE INDEX datasets_time_id_idx ON dataset_values(dataset_id, time);")
db.execute("CREATE INDEX datasets_extra_data_supplemental_idx ON dataset_extra_data(dataset_id, time, data);")
db.commit()
except:
pass

Просмотреть файл

@ -60,6 +60,8 @@
<!-- <label for="baseline">No baseline</label><input type="radio" name="baseline" checked onclick="onNoBaseLineClick()"> -->
</br> </br>
<div><a id="linktothis" href="dgraph.html">Link to this graph</a> </div>
</br>
<div><a id="dumptocsv" href="dumpdata.cgi">Dump to csv</a> </div>
</div>
</tr>

124
webtools/new-graph/dumpdata.cgi Executable file
Просмотреть файл

@ -0,0 +1,124 @@
#!/usr/bin/env python
import cgitb; cgitb.enable()
import os
import sys
import cgi
import time
import re
import gzip
import minjson as json
import cStringIO
from pysqlite2 import dbapi2 as sqlite
# Path to the SQLite database used by the graph server (relative to the CWD
# the web server runs this CGI from).
DBPATH = "db/data.sqlite"
# Module-level connection shared by the dump helpers below.
db = sqlite.connect(DBPATH)
#
# returns a plain text file containing the information for a given dataset in two csv tables
# the first table containing the dataset info (branch, date, etc)
# the second table containing the dataset values
#
# incoming query string:
#
# setid=number
# Where number is a valid setid
#
# starttime=tval
# Start time to return results from, in seconds since GMT epoch
# endtime=tval
# End time, in seconds since GMT epoch
def doError(errCode):
errString = "unknown error"
if errCode == -1:
errString = "bad tinderbox"
elif errCode == -2:
errString = "bad test name"
print "{ resultcode: " + str(errCode) + ", error: '" + errString + "' }"
def esc(val):
    # Quote a value for CSV output: wrap it in double quotes and double
    # any embedded double-quote characters.
    text = str(val).replace('"', '""')
    return '"' + text + '"'
def dumpData(fo, setid, starttime, endtime):
    # Write two CSV tables to the file object *fo*:
    #   1) dataset info rows (id, machine, branch, test, date)
    #   2) dataset value rows (dataset_id, time, value, buildid, data)
    # setid is a list of numeric-string dataset ids (validated by
    # checkNumber before this is called); starttime/endtime are numeric
    # strings or None.
    s1 = ""
    s2 = ""
    # BUGFIX: the time bounds are numeric literals, not columns of alias B.
    # The previous code built " AND time >= B." + starttime, yielding
    # invalid SQL such as "time >= B.123".
    if starttime:
        s1 = " AND time >= " + starttime
    if endtime:
        s2 = " AND time <= " + endtime
    cur = db.cursor()
    setid = ",".join(setid)
    fo.write("dataset,machine,branch,test,date\n")
    # NOTE(review): dataset_info has a "date" column, not "time"; if s1/s2
    # are ever non-empty this query may fail -- confirm the intended column.
    cur.execute("SELECT B.id, B.machine, B.branch, B.test, B.date FROM dataset_info as B WHERE id IN (%s) %s %s ORDER BY id" % (setid, s1, s2,))
    for row in cur:
        fo.write('%s,%s,%s,%s,%s\n' % (esc(row[0]), esc(row[1]), esc(row[2]), esc(row[3]), esc(row[4])))
    fo.write("dataset,time,value,buildid,data\n")
    cur.close()
    cur = db.cursor()
    # Outer-join the optional branchinfo and extra_data rows onto each value
    # so datasets missing those rows still appear (with NULLs).
    cur.execute("SELECT B.dataset_id, B.time, B.value, B.branchid, B.data FROM ((dataset_values LEFT OUTER JOIN dataset_branchinfo ON dataset_values.dataset_id = dataset_branchinfo.dataset_id AND dataset_values.time = dataset_branchinfo.time) AS A LEFT OUTER JOIN dataset_extra_data ON A.dataset_id = dataset_extra_data.dataset_id AND A.time = dataset_extra_data.time) AS B WHERE dataset_id IN (%s) %s %s ORDER BY B.dataset_id, B.time" % (setid, s1, s2))
    for row in cur:
        fo.write('%s,%s,%s,%s,%s\n' % (esc(row[0]), esc(row[1]), esc(row[2]), esc(row[3]), esc(row[4])))
    cur.close()
# Returns a truthy value (1, or a match object) when var is None or a string
# consisting only of digits and dots; returns None otherwise.
def checkNumber(var):
    if var is None:
        # A missing parameter is treated as acceptable.
        return 1
    # Note: the pattern also matches the empty string, as before.
    return re.match('^[0-9.]*$', var)
# Returns a truthy value (1, or a match object) when var is None or a string
# made only of alphanumerics, dots, underscores, parens, hyphens and spaces;
# returns None otherwise.
def checkString(var):
    if var is None:
        # A missing parameter is treated as acceptable.
        return 1
    return re.match('^[0-9A-Za-z._()\- ]*$', var)
# Emit gzip-compressed output only when the client advertises support.
doGzip = 0
try:
    if "gzip" in os.environ["HTTP_ACCEPT_ENCODING"]:
        doGzip = 1
except:
    pass
form = cgi.FieldStorage()
# Multi-valued numeric params: validate every value, then bind the list
# into a module-level global of the same name (here: "setid").
for numField in ["setid"]:
    val = form.getlist(numField)
    for v in val:
        if not checkNumber(v):
            print "Invalid string arg: ", numField, " '" + v + "'"
            sys.exit(500)
    globals()[numField] = val
# Single-valued numeric params; form.getfirst returns None when absent,
# which checkNumber accepts.
for numField in ["starttime", "endtime"]:
    val = form.getfirst(numField)
    if not checkNumber(val):
        print "Invalid string arg: ", numField, " '" + val + "'"
        sys.exit(500)
    globals()[numField] = val
# Buffer the whole body so the headers can be written first, and so the
# gzip stream can be finalized before being sent.
zbuf = cStringIO.StringIO()
zfile = zbuf
if doGzip == 1:
    zfile = gzip.GzipFile(mode = 'wb', fileobj = zbuf, compresslevel = 9)
dumpData(zfile, setid, starttime, endtime)
sys.stdout.write("Content-Type: text/plain\n")
if doGzip == 1:
    # close() flushes the gzip trailer into zbuf before we read it.
    zfile.close()
    sys.stdout.write("Content-Encoding: gzip\n")
sys.stdout.write("\n")
sys.stdout.write(zbuf.getvalue())

Просмотреть файл

@ -57,8 +57,11 @@
<div id="formend">
<input type="submit" onclick="onGraph()" value="Graph It!">
</br> </br>
<!-- <label for="baseline">No baseline</label><input type="radio" name="baseline" checked onclick="onNoBaseLineClick()"> -->
<a id="linktothis" href="graph.html">Link to this graph</a>
</br>
<div><a id="dumptocsv" href="dumpdata.cgi">Dump to csv</a> </div>
</div>
<br>
@ -68,7 +71,6 @@
<td class="graphconfig-list">
<div id="graphforms"></div>
<div id="addone">
<img src="js/img/plus.png" class="plusminus" onclick="addGraphForm()" alt="Plus">
</div>

Просмотреть файл

@ -156,6 +156,10 @@ GraphFormModule.prototype = {
+ "&" + prefix + "avg=" + (this.average? "1" : "0");
},
    // Query-string fragment identifying this module's dataset for dumpdata.cgi.
    getDumpString: function () {
        return "setid=" + this.testId;
    },
onChangeTest: function (forceTestId) {
this.testId = this.testSelect.value;
},

Просмотреть файл

@ -66,6 +66,7 @@ TinderboxData.prototype = {
onDataSetAvailable: null,
defaultLoadRange: null,
raw: 0,
init: function () {
var self = this;
@ -164,7 +165,8 @@ TinderboxData.prototype = {
if (endTime)
reqstr += "&endtime=" + endTime;
//raw data is the extra_data column
reqstr += "&raw=1";
if (this.raw)
reqstr += "&raw=1";
//log (reqstr);
loadJSONDoc(reqstr)
.addCallbacks(

Просмотреть файл

@ -263,6 +263,18 @@ DiscreteGraphFormModule.prototype = {
return qstring;
},
    // Build a "setid=A&setid=B..." query-string fragment for every selected
    // test option, consumed by dumpdata.cgi.
    getDumpString: function () {
        var prefix = '';
        var dstring = '';
        // NOTE: "for each" is a Mozilla-only JS extension, used throughout
        // this file.
        for each (var opt in this.testSelect.options) {
            if (opt.selected) {
                dstring += prefix + "setid=" + opt.value;
                prefix = "&";
            }
        }
        return dstring;
    },
onChangeTest: function (forceTestIds) {
this.testId = this.testSelect.value;
//log("setting testId: " + this.testId);

Просмотреть файл

@ -76,6 +76,7 @@ function loadingDone(graphTypePref) {
}
else {
Tinderbox = new DiscreteTinderboxData();
Tinderbox.raw = 1;
SmallPerfGraph = new DiscreteGraph("smallgraph");
BigPerfGraph = new DiscreteGraph("graph");
onDiscreteDataLoadChanged();
@ -141,6 +142,7 @@ function loadingDone(graphTypePref) {
}
updateLinkToThis();
updateDumpToCsv();
});
if (graphType == CONTINUOUS_GRAPH) {
@ -312,7 +314,7 @@ function onGraphLoadRemainder(baselineDataSet) {
if (avgds)
log ("got avgds: (", module.id, ")", avgds.firstTime, avgds.lastTime, avgds.data.length);
for each (g in [BigPerfGraph, SmallPerfGraph]) {
g.addDataSet(ds);
if (avgds)
@ -330,6 +332,7 @@ function onGraphLoadRemainder(baselineDataSet) {
//if (graphType == CONTINUOUS_GRAPH) {
updateLinkToThis();
updateDumpToCsv();
//}
} catch(e) { log(e); }
};
@ -396,6 +399,17 @@ function findGraphModule(testId) {
return null;
}
function updateDumpToCsv() {
var ds = "?"
prefix = ""
for each (var gm in GraphFormModules) {
ds += prefix + gm.getDumpString();
prefix = "&"
}
log ("ds");
getElement("dumptocsv").href = "http://" + document.location.host + "/dumpdata.cgi" + ds;
}
function updateLinkToThis() {
var qs = "";

Просмотреть файл

@ -42,8 +42,11 @@ try:
db.execute("CREATE TABLE dataset_extra_data (dataset_id INTEGER, time INTEGER, data BLOB);");
db.execute("CREATE TABLE annotations (dataset_id INTEGER, time INTEGER, value STRING);")
db.execute("CREATE INDEX datasets_id_idx ON dataset_values(dataset_id);")
db.execute("CREATE INDEX datasets_branchinfo_id_idx ON dataset_branchinfo(dataset_id);")
db.execute("CREATE INDEX datasets_extradata_id_idx ON dataset_extra_data(dataset_id);")
db.execute("CREATE INDEX datasets_time_idx ON dataset_values(time);")
db.execute("CREATE INDEX datasets_time_id_idx ON dataset_values(dataset_id, time);")
db.execute("CREATE INDEX datasets_extra_data_supplemental_idx ON dataset_extra_data(dataset_id, time, data);")
db.commit()
except:
pass