Bug 1081792 - Remove tools/page-loader/, which is ancient and unused. r=dbaron.

DONTBUILD because NPOTB.
Nicholas Nethercote, 2014-10-15 17:49:53 -07:00
Parent: 65c3211a89
Commit: beef012a79
11 changed files: 0 additions and 2071 deletions


tools/page-loader/PageData.pm
@@ -1,266 +0,0 @@
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
package PageData;
use strict;
use vars qw($MagicString $ClientJS); # defined at end of file
#
# contains a set of URLs and other meta information about them
#
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = {
ClientJS => $ClientJS,
MagicString => $MagicString,
PageHash => {},
PageList => [],
Length => undef,
FileBase => undef,
HTTPBase => undef
};
bless ($self, $class);
$self->_init();
return $self;
}
#
# Parse a config file in the current directory for info.
# All requests to the current cgi-bin path will use the same info;
# to set up specialized lists, create a separate cgi-bin subdir
#
sub _init {
my $self = shift;
my $file = "urllist.txt";
open(FILE, "< $file") ||
die "Can't open file $file: $!";
while (<FILE>) {
next if /^$/;
next if /^#|^\s+#/;
s/\s+#.*$//;
if (/^HTTPBASE:\s+(.*)$/i) {
$self->{HTTPBase} = $1;
} elsif (/^FILEBASE:\s+(.*)$/i) {
$self->{FileBase} = $1;
} else {
#
# each of the remaining lines are:
# (1) the subdirectory containing the content for this URL,
# (2) the name of the top-level document [optional, default='index.html']
# (3) mime type for this document [optional, default is text/html]
# (4) a character set for this document [optional, default is none]
# e.g.,
# home.netscape.com
# www.mozilla.org index.html
# www.aol.com default.xml text/xml
# www.jp.aol.com index.html text/html Shift_JIS
#
my @ary = split(/\s+/, $_);
$ary[1] ||= 'index.html';
push @{$self->{PageList}}, { Name => $ary[0],
URL => $ary[0] . '/' . $ary[1],
MimeType => $ary[2] || "text/html",
CharSet => $ary[3] || ''
};
}
}
# check that we have enough to go on
die "Did not read any URLs" unless scalar(@{$self->{PageList}});
die "Did not read a value for the http base" unless $self->{HTTPBase};
die "Did not read a value for the file base" unless $self->{FileBase};
$self->{Length} = scalar(@{$self->{PageList}});
$self->_createHashView();
}
sub _createHashView {
# repackages the array, so it can be referenced by name
my $self = shift;
for my $i (0..$self->lastidx) {
my $hash = $self->{PageList}[$i];
#warn $i, " ", $hash, " ", %$hash;
$self->{PageHash}{$hash->{Name}} = {
Index => $i,
URL => $hash->{URL},
};
}
}
sub filebase { my $self = shift; return $self->{FileBase}; }
sub httpbase { my $self = shift; return $self->{HTTPBase}; }
sub length { my $self = shift; return $self->{Length}; }
sub lastidx { my $self = shift; return $self->{Length} - 1; }
sub magicString { my $self = shift; return $self->{MagicString}; }
sub clientJS { my $self = shift; return $self->{ClientJS}; }
sub url {
# get the relative url by index or by name
my $self = shift;
my $arg = shift;
if ($arg =~ /^\d+$/) {
return $self->_checkIndex($arg) ? $self->{PageList}[$arg]{URL} : "";
} else {
return $self->{PageHash}{$arg}{URL};
}
}
sub charset {
# get the charset for this URL, by index
my $self = shift;
my $arg = shift;
if ($arg =~ /^\d+$/) {
return $self->_checkIndex($arg) ? $self->{PageList}[$arg]{CharSet} : "";
} else {
die "$arg' is not a numeric index";
}
}
sub mimetype {
# get the mimetype for this URL, by index
my $self = shift;
my $arg = shift;
if ($arg =~ /^\d+$/) {
return $self->_checkIndex($arg) ? $self->{PageList}[$arg]{MimeType} : "";
} else {
die "$arg' is not a numeric index";
}
}
sub name {
my $self = shift;
my $arg = shift;
if ($arg =~ /^\d+$/) {
return $self->_checkIndex($arg) ? $self->{PageList}[$arg]{Name} : "";
} else {
#warn "You looked up the name using a name.";
return $arg;
}
}
sub index {
my $self = shift;
my $arg = shift;
if ($arg =~ /^\d+$/) {
#warn "You looked up the index using an index.";
return $arg;
} else {
return $self->{PageHash}{$arg}{Index};
}
}
sub _checkIndex {
my $self = shift;
my $idx = shift;
die "Bogus index passed to PageData: $idx"
unless defined($idx) &&
$idx =~ /^\d+$/ &&
$idx >= 0 &&
$idx < $self->{Length};
return 1;
}
#
# JS to insert in the static HTML pages to trigger client timing and reloading.
# You must escape any '$', '@', '\n' contained in the JS code fragment. Otherwise,
# perl will attempt to interpret them, and silently convert " $foo " to " ".
#
# JS globals have been intentionally "uglified" with 'moztest_', to avoid collision
# with existing content in the page
#
$MagicString = '<!-- MOZ_INSERT_CONTENT_HOOK -->';
$ClientJS =<<"ENDOFJS";
//<![CDATA[
function moztest_tokenizeQuery() {
var query = {};
var pairs = document.location.search.substring(1).split('&');
for (var i=0; i < pairs.length; i++) {
var pair = pairs[i].split('=');
query[pair[0]] = unescape(pair[1]);
}
return query;
}
function moztest_setLocationHref(href, useReplace) {
// false => "Location.href=url", not ".replace(url)"
if (useReplace) {
document.location.replace(href);
} else {
document.location.href = href;
}
}
var g_moztest_Href;
function moztest_nextRequest(c_part) {
function getValue(arg,def) {
return !isNaN(arg) ? parseInt(Number(arg)) : def;
}
var q = moztest_tokenizeQuery();
var index = getValue(q['index'], 0);
var cycle = getValue(q['cycle'], 0);
var maxcyc = getValue(q['maxcyc'], 1);
var replace = getValue(q['replace'], 0);
var nocache = getValue(q['nocache'], 0);
var delay = getValue(q['delay'], 0);
var timeout = getValue(q['timeout'], 30000);
var c_ts = getValue(q['c_ts'], Number.NaN);
// check for times
var now = (new Date()).getTime();
var c_intvl = now - c_ts;
var c_ts = now + delay; // adjust for delay time
// Now make the request ...
g_moztest_Href = document.location.href.split('?')[0] +
"?c_part=" + c_part +
"&index=" + ++index + // increment the request index
"&id=" + q['id'] +
"&maxcyc=" + maxcyc +
"&replace=" + replace +
"&nocache=" + nocache +
"&delay=" + delay +
"&timeout=" + timeout +
"&c_intvl=" + c_intvl +
"&s_ts=" + g_moztest_ServerTime +
"&c_ts=" + c_ts +
"&content=" + g_moztest_Content;
window.setTimeout("moztest_setLocationHref(g_moztest_Href,false);", delay);
return true;
}
function moztest_onDocumentLoad() {
var loadTime = (new Date()).getTime() - g_moztest_Start;
window.clearTimeout(g_moztest_safetyTimer); // the onload has fired, clear the safety
moztest_nextRequest(loadTime);
}
function moztest_safetyValve() {
moztest_nextRequest(Number.NaN); // if the onload never fires
}
// normal processing is to calculate load time and fetch another URL
window.onload = moztest_onDocumentLoad;
//]]>
ENDOFJS
1; # return true from module


tools/page-loader/README
@@ -1,206 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
Rough notes on setting up this test app. jrgm@netscape.com 2001/08/05
1) this is intended to be run as a mod_perl application under an Apache web
server. [It is possible to run it as a cgi-bin, but then you will be paying
the cost of forking perl and re-compiling all the required modules on each
page load].
2) it should be possible to run this under Apache on win32, but I expect that
there are *nix-oriented assumptions that have crept in. (You would also need
a replacement for Time::HiRes, probably by using Win32::API to call the
Windows 'GetLocalTime()' system function directly.)
3) You need to have a few "non-standard" Perl modules installed. The script
below will tell you which ones are not installed (let me know if I have left
some out of this test).
--8<--------------------------------------------------------------------
#!/usr/bin/perl
my @modules = qw{
LWP::UserAgent SQL::Statement Text::CSV_XS DBD::CSV
DBI Time::HiRes CGI::Request URI
MIME::Base64 HTML::Parser HTML::Tagset Digest::MD5
};
for (@modules) {
printf "%20s", $_;
eval "use $_;";
if ($@) {
print ", I don't have that.\n";
} else {
print ", version: ", eval "\$" . "$_" . "::VERSION", "\n";
}
}
--8<--------------------------------------------------------------------
For modules that are missing, you can find them at http://www.cpan.org/.
Download the .tar.gz files you need, and then (for the most part) just
do 'perl Makefile.PL; make; make test; make install'.
[Update: 28-Mar-2003] I recently installed Red Hat 7.2, as a server, which
installed Apache 1.3.20 with mod_perl 1.24 and perl 5.6.0. I then ran the
CPAN shell (`perl -MCPAN -e shell') and, after completing configuration, I
did 'install Bundle::CPAN', 'install Bundle::LWP' and 'install DBI' to
upgrade those modules and their dependencies. These instructions work on OS X
as well; make sure you run the CPAN shell with sudo so you have sufficient
privileges to install the files.
CGI::Request seems to have disappeared from CPAN, but you can get a copy
from <http://stein.cshl.org/WWW/software/CGI::modules/> and then install
with the standard `perl Makefile.PL; make; make test; make install'.
To install the SQL::Statement, Text::CSV_XS, and DBD::CSV modules, there is
a bundle available on CPAN, so you can use the CPAN shell and just enter
'install Bundle::DBD::CSV'.
At the end of this, the output for the test program above was the
following. (Note: you don't necessarily have to have the exact version
numbers for these modules, as far as I know, but something close would be
safest).
LWP::UserAgent, version: 2.003
SQL::Statement, version: 1.005
Text::CSV_XS, version: 0.23
DBD::CSV, version: 0.2002
DBI, version: 1.35
Time::HiRes, version: 1.43
CGI::Request, version: 2.75
URI, version: 1.23
MIME::Base64, version: 2.18
HTML::Parser, version: 3.27
HTML::Tagset, version: 3.03
Digest::MD5, version: 2.24
4) There is code to draw a sorted graph of the final results, but I have
disabled the place in 'report.pl' where its use would be triggered (look
for the comment). This is so that you can run this without having gone
through the additional setup of the 'gd' library and the GD and
GD::Graph modules. If you have those in place, you can turn this on by
re-enabling the print statement in report.pl.
[Note - 28-Mar-2003: with Redhat 7.2, libgd.so.1.8.4 is preinstalled to
/usr/lib. The current GD.pm modules require libgd 2.0.5 or higher, but you
can use 1.8.4 if you install GD.pm version 1.40, which is available at
<http://stein.cshl.org/WWW/software/GD/old/GD-1.40.tar.gz>. Just do 'perl
Makefile.PL; make; make install' as usual. I chose to build with JPEG
support, but without FreeType, XPM and GIF support. I had a test error when
running 'make test', but it works fine for my purposes. I then installed
'GD::Text' and 'GD::Graph' from the CPAN shell.]
5) To set this up with Apache, create a directory in the cgi-bin for the web
server called e.g. 'page-loader'.
5a) For Apache 1.x/mod_perl 1.x, place this in the Apache httpd.conf file,
and skip to step 5c.
--8<--------------------------------------------------------------------
Alias /page-loader/ /var/www/cgi-bin/page-loader/
<Location /page-loader>
SetHandler perl-script
PerlHandler Apache::Registry
PerlSendHeader On
Options +ExecCGI
</Location>
--8<--------------------------------------------------------------------
[MacOSX note: The CGI folder lives in /Library/WebServer/CGI-Executables/
so the Alias line above should instead read:
Alias /page-loader/ /Library/WebServer/CGI-Executables/page-loader
Case is important (even though the file system is case-insensitive) and
if you type it incorrectly you will get "Forbidden" HTTP errors.
In addition, perl (and mod_perl) aren't enabled by default. You need to
uncomment two lines in httpd.conf:
LoadModule perl_module libexec/httpd/libperl.so
AddModule mod_perl.c
(basically just search for "perl" and uncomment the lines you find).]
5b) If you're using Apache 2.x and mod_perl 1.99/2.x (tested with Red Hat 9),
place this in your perl.conf or httpd.conf:
--8<--------------------------------------------------------------------
Alias /page-loader/ /var/www/cgi-bin/page-loader/
<Location /page-loader>
SetHandler perl-script
PerlResponseHandler ModPerl::RegistryPrefork
PerlOptions +ParseHeaders
Options +ExecCGI
</Location>
--8<--------------------------------------------------------------------
If your mod_perl version is less than 1.99_09, then copy RegistryPrefork.pm
to your vendor_perl ModPerl directory (for example, on Red Hat 9, this is
/usr/lib/perl5/vendor_perl/5.8.0/i386-linux-thread-multi/ModPerl).
If you are using mod_perl 1.99_09 or above, grab RegistryPrefork.pm from
http://perl.apache.org/docs/2.0/user/porting/compat.html#C_Apache__Registry___C_Apache__PerlRun__and_Friends
and copy it to the vendor_perl directory as described above.
5c) When you're finished, restart Apache. Now you can run this as
'http://yourserver.domain.com/page-loader/loader.pl'
6) You need to create a subdirectory called 'db' under the 'page-loader'
directory. This subdirectory 'db' must be writable by the UID that Apache
executes as (e.g., 'nobody' or 'apache'). [You may want to figure out some
other way to do this if this web server is not behind a firewall].
7) You need to assemble a set of content pages, with all images, included JS
and CSS pulled to the same directory. These pages can live anywhere on the
same HTTP server that is running this app. The app assumes that each page
is in its own sub-directory, with included content below that
directory. You can set the location and the list of pages in the file
'urllist.txt'. [See 'urllist.txt' for further details on what needs to be
set there.]
There are various tools that will pull in complete copies of web pages
(e.g. 'wget' or something hand-rolled from LWP::UserAgent). You should edit
the pages to remove any redirects, popup windows, and possibly any
platform-specific JS rules (e.g., Mac-specific CSS included with
'document.write("LINK...'). You should also check for missing content,
or URLs that did not get changed to point to the local content. [One way to
check for this is to tweak this simple proxy server to check your links:
http://www.stonehenge.com/merlyn/WebTechniques/col34.listing.txt]
[MacOSX note: The web files live in /Library/WebServer/Documents, so you will
need to modify urllist.txt to have the appropriate FILEBASE and HTTPBASE.]
8) The "hook" into the content is a single line in each top-level document like this:
<!-- MOZ_INSERT_CONTENT_HOOK -->
which should be placed immediately after the opening <HEAD> element. The script uses
this as the way to substitute a BASE HREF and some JS into the page which will control
the exectution of the test.
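As a rough sketch (the page name and surrounding markup here are made up for
illustration and are not part of any captured content), a prepared top-level
document would start out like this; loader.pl later replaces the hook comment
with a BASE HREF plus the timing script:
--8<--------------------------------------------------------------------
<html>
<head>
<!-- MOZ_INSERT_CONTENT_HOOK -->
<title>www.example.com (captured copy)</title>
<!-- ... the rest of the page's original head and body follow unchanged ... -->
</head>
<body>
...
</body>
</html>
--8<--------------------------------------------------------------------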
9) You will most likely need to remove all load event handlers from your
test documents (onload attribute on body and handlers added with
addEventListener).
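For example (hypothetical markup, not taken from any of the captured pages),
handlers like these would have to be removed, since the injected test script
installs its own window.onload handler:
--8<--------------------------------------------------------------------
<!-- remove the onload attribute: -->
<body onload="initPage()">
<!-- and remove any load listeners added from script: -->
<script>
  window.addEventListener("load", initPage, false);
</script>
--8<--------------------------------------------------------------------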
10) Because the system uses an (X)HTML <base> element, and some XML constructs
are not subject to that (for example xml-stylesheet processing instructions),
you may need to provide the absolute path to external resources.
11) If your documents are transformed on the client side with XSLT, you will
need to add this snippet of XSLT to your stylesheet (and possibly make
sure it does not conflict with your other rules):
--8<--------------------------------------------------------------------
<!-- Page Loader -->
<xsl:template match="html:script">
<xsl:copy>
<xsl:apply-templates/>
</xsl:copy>
<xsl:for-each select="@*">
<xsl:copy/>
</xsl:for-each>
</xsl:template>
--8<--------------------------------------------------------------------
And near the top of your output rules add:
<xsl:apply-templates select="html:script"/>
Finally, make sure you define the XHTML namespace in the stylesheet
with the "html" prefix.
12) I've probably left some stuff out. Bug jrgm@netscape.com for the missing stuff.


tools/page-loader/RegistryPrefork.pm
@@ -1,35 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
package ModPerl::RegistryPrefork;
# RegistryPrefork.pm originally from
# http://perl.apache.org/docs/2.0/user/porting/compat.html#Code_Porting
# backported for mod_perl <= 1.99_08
use strict;
use warnings FATAL => 'all';
our $VERSION = '0.01';
use base qw(ModPerl::Registry);
use File::Basename ();
use constant FILENAME => 1;
sub handler : method {
my $class = (@_ >= 2) ? shift : __PACKAGE__;
my $r = shift;
return $class->new($r)->default_handler();
}
sub chdir_file {
my $file = @_ == 2 ? $_[1] : $_[0]->[FILENAME];
my $dir = File::Basename::dirname($file);
chdir $dir or die "Can't chdir to $dir: $!";
}
1;
__END__


tools/page-loader/URLTimingDataSet.pm
@@ -1,243 +0,0 @@
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
package URLTimingDataSet;
use DBI;
use PageData; # list of test pages, etc.
use strict;
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = {
dataset => [],
results => [],
sorted => [],
average => undef,
avgmedian => undef, # note: average of individual medians
maximum => undef,
minimum => undef,
};
$self->{id} = shift || die "No id supplied";
$self->{table} = shift || "t" . $self->{id};
$self->{pages} = PageData->new;
bless ($self, $class);
$self->_grok();
return $self;
}
sub _grok {
my $self = shift;
my ($res);
# select the dataset from the db
$self->_select();
for (my $i=0; $i < $self->{pages}->length; $i++) {
my $name = $self->{pages}->name($i);
my $count = 0;
my @times = ();
my $nan = 0;
foreach my $ref (@{$self->{dataset}}) {
next if ($name ne $ref->{content});
$count++;
if ($ref->{c_part} eq "NaN") {
# we bailed out of this page load
$res = "NaN";
$nan = 1;
}
else {
my $s_intvl = $ref->{s_intvl};
my $c_intvl = $ref->{c_intvl};
my $errval = abs($s_intvl-$c_intvl)/(($s_intvl+$c_intvl)/2);
if ($errval > 0.08) { # one of them went wrong and stalled out (see [1] below)
$res = ($s_intvl <= $c_intvl) ? $s_intvl : $c_intvl;
} else {
$res = int(($s_intvl + $c_intvl)/2);
}
}
push @times, $res;
}
my $avg = int(_avg(@times));
my $med = _med(@times);
my $max = $nan ? "NaN" : _max(@times);
my $min = _min(@times);
push @{$self->{results}}, [ $i, $name, $count, $avg, $med, $max, $min, @times ];
}
$self->_get_summary();
$self->_sort_result_set();
}
sub _select {
my $self = shift;
my $dbh = DBI->connect("DBI:CSV:f_dir=./db", {RaiseError => 1, AutoCommit => 1})
or die "Cannot connect: " . $DBI::errstr;
my $sql = qq{
SELECT INDEX, S_INTVL, C_INTVL, C_PART, CONTENT, ID
FROM $self->{table}
WHERE ID = '$self->{id}'
};
my $sth = $dbh->prepare($sql);
$sth->execute();
while (my @data = $sth->fetchrow_array()) {
push @{$self->{dataset}},
{index => $data[0],
s_intvl => $data[1],
c_intvl => $data[2],
c_part => $data[3],
content => $data[4],
id => $data[5]
};
}
$sth->finish();
$dbh->disconnect();
}
sub _get_summary {
my $self = shift;
my (@avg, @med, @max, @min);
# how many pages were loaded in total ('sampled')
$self->{samples} = scalar(@{$self->{dataset}});
# how many cycles (should I get this from test parameters instead?)
$self->{count} = int(_avg( map($_->[2], @{$self->{results}}) ));
#warn $self->{count};
# calculate overall average, average median, maximum, minimum, (RMS Error?)
for (@{$self->{results}}) {
push @avg, $_->[3];
push @med, $_->[4];
push @max, $_->[5];
push @min, $_->[6];
}
$self->{average} = int(_avg(@avg));
$self->{avgmedian} = int(_avg(@med)); # note: averaging individual medians
$self->{maximum} = _max(@max);
$self->{minimum} = _min(@min);
}
sub _sort_result_set {
my $self = shift;
# sort by median load time
# @{$self->{sorted}} = sort {$a->[4] <=> $b->[4]} @{$self->{results}};
# might be "NaN", but this is lame of me to be carrying around a string instead of undef
@{$self->{sorted}} =
sort {
if ($a->[4] eq "NaN" || $b->[4] eq "NaN") {
return $a->[4] cmp $b->[4];
} else {
return $a->[4] <=> $b->[4];
}
} @{$self->{results}};
}
sub as_string {
my $self = shift;
return $self->_as_string();
}
sub as_string_sorted {
my $self = shift;
return $self->_as_string(@{$self->{sorted}});
}
sub _as_string {
my $self = shift;
my @ary = @_ ? @_ : @{$self->{results}};
my $str;
for (@ary) {
my ($index, $path, $count, $avg, $med, $max, $min, @times) = @$_;
$str .= sprintf "%3s %-26s\t", $index, $path;
if ($count > 0) {
$str .= sprintf "%6s %6s %6s %6s ", $avg, $med, $max, $min;
foreach my $time (@times) {
$str .= sprintf "%6s ", $time;
}
}
$str .= "\n";
}
return $str;
}
#
# package internal helper functions
#
sub _num {
my @array = ();
for (@_) { push @array, $_ if /^[+-]?\d+\.?\d*$/o; }
return @array;
}
sub _avg {
my @array = _num(@_);
return "NaN" unless scalar(@array);
my $sum = 0;
for (@array) { $sum += $_; }
return $sum/scalar(@array);
}
sub _max {
my @array = _num(@_);
return "NaN" unless scalar(@array);
my $max = $array[0];
for (@array) { $max = ($max > $_) ? $max : $_; }
return $max;
}
sub _min {
my @array = _num(@_);
return "NaN" unless scalar(@array);
my $min = $array[0];
for (@array) { $min = ($min < $_) ? $min : $_; }
return $min;
}
# returns the floor(N/2) element of a sorted ascending array
sub _med {
my @array = _num(@_);
return "NaN" unless scalar(@array);
my $index = int((scalar(@array)-1)/2);
@array = sort {$a <=> $b} @array;
return $array[$index];
}
1; # return true
################################################################################
#
# [1] in looking at the test results, in almost all cases, the
# round-trip time measured by the server logic and the client logic
# would be almost the same value (which is what one would
# expect). However, on occasion, one of them would be "out of
# whack", and inconsistent with the additional "layout" measure taken
# by the client.
#
# i.e., a set of numbers like these:
# c_part c_intvl s_intvl
# 800 1003 997
# 804 1007 1005
# 801 1001 1325 <--
# 803 1318 998 <--
# 799 1002 1007
# ...
#
# which looks like the server side would stall in doing the accept or
# in running the mod-perl handler (possibly a GC?). (The following
# c_intvl would then be out of whack by a matching amount on the next
# cycle).
#
# At any rate, since it was clear from comparing with the 'c_part'
# measure which of the times was bogus, I just use an arbitrary error
# measure to determine when to toss out the "bad" value.
#


tools/page-loader/URLTimingGraph.pm
@@ -1,86 +0,0 @@
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
package URLTimingGraph;
use strict;
use GD;
use GD::Graph::linespoints;
use GD::Graph::points;
use GD::Graph::lines;
use GD::Graph::mixed;
use GD::Graph::colour;
use GD::Graph::Data;
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = {};
bless ($self, $class);
$self->{data} = shift || die "No data.";
my $args = shift || {};
$self->{cgimode} = $args->{cgimode} || 0;
$self->{title} = $args->{title} || "";
$self->{types} = $args->{types} || ['lines', undef, undef, undef, undef, undef, undef];
$self->{dclrs} = $args->{dclrs} || [qw(lred)];
$self->{legend} = $args->{legend} || [qw(undef)];
$self->{y_max_value} = $args->{y_max_value} || 10000;
$self->{width} = $args->{width} || 800;
$self->{height} = $args->{height} || 720;
return $self;
}
sub _set_standard_options {
my $self = shift;
$self->{graph}->set(
x_label => '',
y_label => 'Page Load Time (msec)',
default_type => 'points',
x_labels_vertical => 1,
y_long_ticks => 1,
x_tick_length => 8,
x_long_ticks => 0,
line_width => 2,
marker_size => 3,
markers => [8],
show_values => 0,
transparent => 0,
interlaced => 1,
skip_undef => 1,
)
|| warn $self->{graph}->error;
$self->{graph}->set_title_font(GD::Font->Giant);
$self->{graph}->set_x_label_font(GD::Font->Large);
$self->{graph}->set_y_label_font(GD::Font->Large);
$self->{graph}->set_x_axis_font(GD::Font->Large);
$self->{graph}->set_y_axis_font(GD::Font->Large);
$self->{graph}->set_legend_font(GD::Font->Giant);
}
sub plot {
my $self = shift;
$self->{graph} = new GD::Graph::mixed($self->{width},
$self->{height});
$self->_set_standard_options();
$self->{graph}->set(title => $self->{title},
types => $self->{types},
y_max_value => $self->{y_max_value},
dclrs => $self->{dclrs},
)
|| warn $self->{graph}->error;
$self->{graph}->set_legend( @{$self->{legend}} );
# draw the graph image
$self->{graph}->plot($self->{data}) ||
die $self->{graph}->error;
# send it back to stdout (or browser)
print "Content-type: image/png\n\n" if $self->{cgimode};
binmode STDOUT;
print $self->{graph}->gd->png();
}
1; #return true


tools/page-loader/dump.pl
@@ -1,269 +0,0 @@
#!/usr/bin/perl
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
use DBI;
use CGI::Carp qw(fatalsToBrowser);
use CGI::Request;
use URLTimingDataSet;
use File::Copy ();
use strict;
use vars qw($dbh $arc $dbroot); # current db, and db/archive
use constant STALE_AGE => 5 * 60; # seconds
# show a chart of this run; turned off in automated tests, and where
# an installation hasn't set up the required modules and libraries
use constant SHOW_CHART => 0;
sub createArchiveMetaTable {
my $table = "tMetaTable";
return if -e "$dbroot/archive/$table"; # don't create it if it exists
warn "createMetaTable:\t$dbroot/archive/$table";
mkdir "$dbroot/archive" unless -d "$dbroot/archive";
my ($sth, $sql);
$sql = qq{
CREATE TABLE tMetaTable
(DATETIME CHAR(14), LASTPING CHAR(14),
ID CHAR(8), INDEX INTEGER,
CUR_IDX INTEGER, CUR_CYC INTEGER,
CUR_CONTENT CHAR(128), STATE INTEGER,
BLESSED INTEGER, MAXCYC INTEGER,
MAXIDX INTEGER, REPLACE INTEGER,
NOCACHE INTEGER, DELAY INTEGER,
REMOTE_USER CHAR(16), HTTP_USER_AGENT CHAR(128),
REMOTE_ADDR CHAR(15), USER_EMAIL CHAR(32),
USER_COMMENT CHAR(256)
)
};
$sth = $arc->prepare($sql);
$sth->execute();
$sth->finish();
warn 'created archive meta table';
return 1;
}
sub purgeStaleEntries {
my $id = shift;
my $metatable = "tMetaTable";
# first, remove dead stuff
my $sql = qq{SELECT * FROM $metatable
WHERE STATE = "INIT" OR STATE = "OPEN"};
my $sth = $dbh->prepare($sql);
$sth->execute();
my $now = time();
my $status;
while (my @data = $sth->fetchrow_array()) {
my $age = $now - timestamp2Time($data[1]);
# if OPEN or INIT, and not heard from within STALE_AGE, then it's never coming
# back here again. Delete the entry. Whine in the error_log.
if ($age > STALE_AGE) {
warn "deleting stale record+table, id = $data[2], last = $data[1], @data";
$dbh->do( qq(DELETE FROM $metatable WHERE ID = "$data[2]") );
$dbh->do("DROP TABLE t" . $data[2]);
}
$status .= "$age @data\n";
}
$sth->finish();
# now move any COMPLETE records to archive
$sql = qq{SELECT * FROM $metatable};
$sth = $dbh->prepare($sql);
$sth->execute();
$now = time();
while (my @data = $sth->fetchrow_array()) {
my $age = $now - timestamp2Time($data[1]);
# This keeps the set of "live" entries from growing too large (and slow).
# If COMPLETE and older than STALE_AGE, move to archive.
if ($age > STALE_AGE) {
warn "moving COMPLETE record+table, id = $data[2], last = $data[1], @data";
moveRecordToArchive($data[2], \@data);
$dbh->do( qq(DELETE FROM $metatable WHERE ID = "$data[2]") );
}
}
$sth->finish();
if (!SHOW_CHART) {
# Don't move it if showing a chart. (Otherwise, if showing a
# chart, I'd have to do a little extra work to make sure I
# didn't yank the record away from the IMG request)
$sql = qq{SELECT * FROM $metatable WHERE ID = "$id"};
$sth = $dbh->prepare($sql);
$sth->execute();
while (my @data = $sth->fetchrow_array()) {
warn "moving COMPLETE record+table, id = $id, @data\n";
moveRecordToArchive($data[2], \@data);
$dbh->do( qq(DELETE FROM $metatable WHERE ID = "$data[2]") );
}
}
$sth->finish();
}
sub moveRecordToArchive {
my $id = shift || die "no id";
my $dataref = shift || die "no dataref";
createArchiveMetaTable(); # if it doesn't exist
insertIntoMetaTable($dataref);
File::Copy::move("$dbroot/t$id", "$dbroot/archive/t$id");
}
sub insertIntoMetaTable {
my $dataref = shift || die "no dataref";
my $table = "tMetaTable";
my ($sth, $sql);
$sql = qq{
INSERT INTO $table
(DATETIME, LASTPING, ID,
INDEX, CUR_IDX, CUR_CYC,
CUR_CONTENT, STATE, BLESSED,
MAXCYC, MAXIDX, REPLACE,
NOCACHE, DELAY, REMOTE_USER,
HTTP_USER_AGENT, REMOTE_ADDR, USER_EMAIL,
USER_COMMENT
)
VALUES (?,?,?,?,
?,?,?,?,
?,?,?,?,
?,?,?,?,
?,?,?)
};
$sth = $arc->prepare($sql);
$sth->execute(@$dataref);
$sth->finish();
}
sub timestamp2Time ($) {
my $str = shift;
use Time::Local ();
my @datetime = reverse unpack 'A4A2A2A2A2A2', $str;
--$datetime[4]; # month: 0-11
return Time::Local::timelocal(@datetime);
}
sub serializeDataSet {
# package up this data for storage elsewhere
my $rs = shift;
my $data = "avgmedian|" . $rs->{avgmedian};
$data .= "|average|" . $rs->{average};
$data .= "|minimum|" . $rs->{minimum};
$data .= "|maximum|" . $rs->{maximum};
$_ = $rs->as_string;
s/^\s+//gs;
s/\s+\n$//gs;
s/\s*\n/\|/gs; # fold newlines
s/\|\s+/\|/gs;
s/\s+/;/gs;
return $data . ":" . $_;
}
#
# handle the request
#
my $request = new CGI::Request;
my $id = $request->param('id'); #XXX need to check for valid parameter id
my $rs = URLTimingDataSet->new($id);
print "Content-type: text/html\n\n";
# This sucks: we'll let the test time out to avoid crash-on-shutdown bugs
print "<html><body onload='window.close();'>";
#
# dump some stats for tinderbox to snarf
#
print "<script>\n";
print "if (window.dump) dump('";
print "Starting Page Load Test\\n\\\n";
print "Test id: $id\\n\\\n";
print "Avg. Median : ", $rs->{avgmedian}, " msec\\n\\\n";
print "Average : ", $rs->{average}, " msec\\n\\\n";
print "Minimum : ", $rs->{minimum}, " msec\\n\\\n";
print "Maximum : ", $rs->{maximum}, " msec\\n\\\n";
print "IDX PATH AVG MED MAX MIN TIMES ...\\n\\\n";
if ($request->param('sort')) {
$_ = $rs->as_string_sorted();
} else {
$_ = $rs->as_string();
}
#
# Terminate raw newlines with '\n\' so we don't have an unterminated string literal.
#
s/\n/\\n\\\n/g;
print $_;
print "(tinderbox dropping follows)\\n\\\n";
print "_x_x_mozilla_page_load," , $rs->{avgmedian}, ",", $rs->{maximum}, ",", $rs->{minimum}, "\\n\\\n";
#
# package up this data for storage elsewhere
#
my $data = serializeDataSet($rs);
print "_x_x_mozilla_page_load_details,", $data, "\\n\\\n";
#
# average median
#
#print "TinderboxPrint:<a title=\"Avg. of the median per url pageload time.\" href=\"http://tegu.mozilla.org/graph/query.cgi?tbox=spider&testname=pageload&autoscale=1&days=7&avg=1\">Tp:", $rs->{avgmedian}, "ms</a>", "\\n\\\n";
print "');";
print "</script></body></html>\n";
#
# If this is SurfingSafari, then catch a wave and you're sitting on top of the world!!
# (and also blat this out to tegu, 'cause we got no 'dump' statement.)
#
if ($request->cgi->var("HTTP_USER_AGENT") =~ /Safari/) {
my %machineMap =
(
"10.169.105.26" => "boxset",
"10.169.105.21" => "pawn"
);
my $ip = $request->cgi->var('REMOTE_ADDR');
my $machine = $machineMap{$ip};
my $res = eval q{
use LWP::UserAgent;
use HTTP::Request::Common qw(POST);
my $ua = LWP::UserAgent->new;
$ua->timeout(10); # seconds
my $req = POST('http://tegu.mozilla.org/graph/collect.cgi',
[testname => 'pageload',
tbox => "$machine" . "-aux",
value => $rs->{avgmedian},
data => $data]);
my $res = $ua->request($req);
return $res;
};
if ($@) {
warn "Failed to submit startup results: $@";
} else {
warn "Startup results submitted to server: \n",
$res->status_line, "\n", $res->content, "\n";
}
}
if ($request->param('purge')) {
# now move any old stuff into archive and clean stale entries
# just going with the simple approach of "whoever sees old entries
# first, cleans em up, whether they 'own' them or not". Hopefully,
# the default locking will be sufficient to prevent a race.
close(STDOUT);
sleep(1);
$dbroot = "db";
$dbh = DBI->connect("DBI:CSV:f_dir=./$dbroot",
{RaiseError => 1, AutoCommit => 1})
|| die "Cannot connect: " . $DBI::errstr;
$arc = DBI->connect("DBI:CSV:f_dir=./$dbroot/archive",
{RaiseError => 1, AutoCommit => 1})
|| die "Cannot connect: " . $DBI::errstr;
purgeStaleEntries($id);
$dbh->disconnect();
$arc->disconnect();
}
exit 0;


tools/page-loader/echo.pl
@@ -1,112 +0,0 @@
#!/usr/bin/perl
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
use Time::HiRes qw(gettimeofday tv_interval);
sub encodeHiResTime {
my $timeref = shift;
return unless ref($timeref);
my $time = $$timeref[0] . "-" . $$timeref[1];
return $time;
}
my $time = encodeHiResTime([gettimeofday()]);
print "Content-type: text/html\n\n";
print <<"ENDOFHTML";
<html>
<head>
<script>
var gServerTime = '$time';
function tokenizeQuery() {
var query = {};
var pairs = document.location.search.substring(1).split('&');
for (var i=0; i < pairs.length; i++) {
var pair = pairs[i].split('=');
query[pair[0]] = unescape(pair[1]);
}
return query;
}
function setLocationHref(aHref, aReplace) {
if (aReplace)
document.location.replace(aHref);
else
document.location.href = aHref;
}
var gHref;
function doNextRequest(aTime) {
function getValue(arg,def) {
return !isNaN(arg) ? parseInt(Number(arg)) : def;
}
var q = tokenizeQuery();
var delay = getValue(q['delay'], 0);
var now = (new Date()).getTime();
var c_intvl = now - c_ts;
var c_ts = now + delay; // adjust for delay time
// Now make the request ...
if (q['url']) {
gHref = q['url'] +
"?c_part=" + -1 + // bogo request is not recorded
"&index=" + 0 +
"&id=" + q['id'] +
"&maxcyc=" + q['maxcyc'] +
"&replace=" + q['replace'] +
"&nocache=" + q['nocache'] +
"&delay=" + delay +
"&timeout=" + q['timeout'] +
"&c_intvl=" + c_intvl +
"&s_ts=" + gServerTime +
"&c_ts=" + c_ts;
window.setTimeout("setLocationHref(gHref,false);", delay);
return true;
}
}
function startTest() {
if (window.innerHeight && window.innerWidth) {
// force a consistent region for layout and painting.
window.innerWidth=820;
window.innerHeight=620;
}
doNextRequest(0);
}
window.setTimeout("startTest()", 1000);
</script>
</head>
<body>
<p>
This page starts the test.
</p>
<p>
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
</p>
<p>
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
</p>
<p>
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
dummy page dummy page dummy page dummy page dummy page dummy page
</p>
</body>
</html>
ENDOFHTML
exit 0;


tools/page-loader/graph.pl
@@ -1,93 +0,0 @@
#!/usr/bin/perl
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
use CGI::Carp qw(fatalsToBrowser);
use CGI::Request;
use URLTimingDataSet;
use URLTimingGraph;
my $request = new CGI::Request;
my $id = $request->param('id'); #XXX need to check for valid parameter id
my $id2 = $request->param('id2') || undef; # possible comparison test
# set up the data for the first graph
my $rs = URLTimingDataSet->new($id);
my @data = ();
push @data, [ map($_->[1], @{$rs->{sorted}}) ]; # URL
push @data, [ map($_->[4], @{$rs->{sorted}}) ]; # median
# '7' is the first slot for individual test run data
for (my $idx = 7; $idx < (7+$rs->{count}); $idx++) {
push @data, [ map($_->[$idx], @{$rs->{sorted}}) ];
}
# set up the data for the second graph, if a second id was requested;
# need to sort according to the first chart's ordering
my $rs2;
if ($id2) {
$rs2 = URLTimingDataSet->new($id2);
my @order = map($_->[0], @{$rs->{sorted}}); # get the first chart's order
my @resort = ();
for my $i (@order) {
for (@{$rs2->{sorted}}) {
if ($i == $_->[0]) {
push @resort, $_;
last;
}
}
}
push @data, [ map($_->[4], @resort) ]; # median
for (my $idx = 7; $idx < (7+$rs2->{count}); $idx++) {
push @data, [ map($_->[$idx], @resort) ];
}
}
# and now convert 'NaN' to undef, if they exist in the data.
for (@data) { for (@$_) { $_ = undef if $_ eq "NaN"; } }
# set up the chart parameters
my $args = {};
$args->{cgimode} = 1;
$args->{title} = "id=$id";
# need to draw first visit as dotted with points
my $types = ['lines','lines']; for (1..$rs->{count}-1) { push @$types, undef; }
my $dclrs = []; for (0..$rs->{count}) { push @$dclrs, 'lred'; }
my $legend = [$id]; for (1..$rs->{count}) { push @$legend, undef; }
if ($id2) {
push @$types, 'lines'; for (1..$rs2->{count}) { push @$types, undef; }
for (0..$rs2->{count}) { push @$dclrs, 'lblue'; }
push @$legend, $id2; for (1..$rs2->{count}) { push @$legend, undef; }
}
$args->{types} = $types;
$args->{dclrs} = $dclrs;
$args->{legend} = $legend;
#XXX set min to zero, and round max to 1000
$args->{y_max_value} = maxDataOrCap();
## nope $args->{y_min_value} = 1000;
$args->{width} = 800;
$args->{height} = 720;
my $g = URLTimingGraph->new(\@data, $args);
$g->plot();
exit;
sub maxDataOrCap {
my $max;
warn $rs->{maximum};
if ($rs2 && ($rs->{maximum} < $rs2->{maximum})) {
$max = $rs2->{maximum};
} else {
$max = $rs->{maximum};
}
warn $max;
#return $max > 10000 ? 10000 : 1000*int($max/1000)+1000;
# just return whatever, rounded to 1000
return 1000*int($max/1000)+1000;
}


tools/page-loader/loader.pl
@@ -1,618 +0,0 @@
#!/usr/bin/perl
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
use strict;
use CGI::Request;
use CGI::Carp qw(fatalsToBrowser);
use Time::HiRes qw(gettimeofday tv_interval);
use POSIX qw(strftime);
use DBI;
# list of test pages, JS to insert, httpbase, filebase, etc.
use PageData;
use vars qw(%params $req $cgi $dbh $pagedata
$gStartNow $gStartNowStr
$gResponseNow $gLogging);
$gStartNow = [gettimeofday]; # checkpoint the time
$gStartNowStr = strftime "%Y%m%d%H%M%S", localtime;
$gLogging = 1;
$req = new CGI::Request; # get the HTTP/CGI request
$cgi = $req->cgi;
$pagedata = PageData->new;
setDefaultParams();
#XXXdebugcrap
#warn $params{index}, " ", $params{maxidx};
if (!defined($req->param('delay'))) {
# give the user a form to pick options (but note that going
# to "loader.pl?delay=1000" immediately starts the test run
outputForm();
}
elsif (!$req->param('id')) {
initialize(); # do redirect to start the cycle
}
elsif ($params{index} > $params{maxidx}) {
redirectToReport(); # the test is over; spit out a summary
markTestAsComplete(); # close the meta table entry
}
elsif (!isRequestStale()) {
outputPage(); # otherwise, keep dishing out pages
updateDataBase(); # client has the response; now write out stats to db
}
# cleanup
$req = undef;
$dbh->disconnect() if $dbh; # not strictly required (ignored in some cases anyways)
#logMessage(sprintf("Page load server responded in %3d msec, total time %3d msec, pid: %d",
# 1000*tv_interval($gStartNow, $gResponseNow), 1000*tv_interval($gStartNow), $$))
# if $gResponseNow; # log only when a test page has been dished out
exit 0;
#######################################################################
sub logMessage {
print STDERR strftime("[%a %b %d %H:%M:%S %Y] ", localtime), @_, "\n"
if $gLogging;
}
sub isRequestStale {
my $limit = 30*60; # 30 minutes, although if we never stalled on mac I'd make it 3 minutes
my $ts = decodeHiResTime($params{s_ts});
my $delta = tv_interval($ts, $gStartNow);
return undef if $delta < $limit;
# otherwise, punt this request
print "Content-type: text/html\n\n";
print <<"ENDOFHTML";
<html><head><title>Page Loading Times Test</title></head><body>
<p><b>The timestamp on the request is too old to continue:<br>
s_ts=$params{s_ts} was $delta seconds ago. Limit is $limit seconds.</b></p>
</body></html>
ENDOFHTML
return 1; # it's stale
}
sub initialize {
updateMetaTable();
createDataSetTable();
# start the test by bouncing off of an echo page
my $script = $cgi->var("SCRIPT_NAME");
my $server = $cgi->var("SERVER_NAME");
my $proto = $ENV{SERVER_PORT} == 443 ? 'https://' : 'http://';
my $me = $proto . $server . $script;
$script =~ /^(.*\/).*$/;
my $loc = "Location: ". $proto . $server . $1 . "echo.pl?";
for (qw(id index maxcyc delay replace nocache timeout)) {
$loc .= "$_=$params{$_}\&";
}
$loc .= "url=" . $me;
print $loc, "\n\n";
}
sub redirectToReport {
# n.b., can also add '&sort=1' to get a time sorted list
my $proto = $ENV{SERVER_PORT} == 443 ? 'https://' : 'http://';
my $loc = "Location: " . $proto . $cgi->var("SERVER_NAME");
$cgi->var("SCRIPT_NAME") =~ /^(.*\/).*$/;
$loc .= $1 . "report.pl?id=" . $params{id};
# To use for a tinderbox, comment out the line above and uncomment this:
# $loc .= $1 . "dump.pl?id=" . $params{id} . "&purge=1";
print $loc, "\n\n";
}
sub generateTestId {
# use the epoch time, in hex, plus a two-character random.
return sprintf "%8X%02X", time(), int(256*rand());
}
sub setDefaultParams {
$params{id} = $req->param('id') || generateTestId(); # "unique" id for this run
$params{index} = $req->param('index') || 0; # request index for the test
$params{maxcyc} = defined($req->param('maxcyc')) ?
$req->param('maxcyc') : 3; # max visits (zero-based count)
$params{delay} = $req->param('delay') || 1000; # setTimeout on the next request (msec)
$params{replace} = $req->param('replace') || 0; # use Location.replace (1) or Location.href (0)
$params{nocache} = $req->param('nocache') || 0; # serve content via uncacheable path
$params{c_part} = $req->param('c_part') || 0; # client time elapsed; page head to onload (msec)
$params{c_intvl} = $req->param('c_intvl') || 0; # client time elapsed; onload to onload event (msec)
$params{c_ts} = $req->param('c_ts') || 0; # client timestamp (.getTime()) (msec)
$params{content} = $req->param('content') || "UNKNOWN"; # name of content page for this data
$params{s_ts} = $req->param('s_ts') || undef; # server timestamp; no default
$params{timeout} = $req->param('timeout') || 30000; # msec; timer will cancel stalled page loading
$params{maxidx} = ($params{maxcyc}+1) * $pagedata->length; # total pages loads to be done
$params{curidx} = $params{index} % $pagedata->length; # current request index into page list
$params{curcyc} = int(($params{index}-1) / $pagedata->length); # current "cycle" (visit)
}
sub outputPage {
my $relpath = $pagedata->url($params{curidx});
my $file = $pagedata->filebase . $relpath;
open (HTML, "<$file") ||
die "Can't open file: $file, $!";
my $hook = "<script xmlns='http://www.w3.org/1999/xhtml'>\n";
$hook .= "var g_moztest_Start = (new Date()).getTime();\n";
$hook .= "var g_moztest_ServerTime='" . encodeHiResTime($gStartNow) . "';\n";
$hook .= "var g_moztest_Content='" . $pagedata->name($params{curidx}) . "';\n";
$hook .= $pagedata->clientJS; # ... and the main body
$hook .= "var g_moztest_safetyTimer = ";
$hook .= "window.setTimeout(moztest_safetyValve, " . $params{timeout} . ");";
$hook .= "</script>\n";
my $basepath = $pagedata->httpbase;
$basepath =~ s/^http:/https:/i
if $ENV{SERVER_PORT} == 443;
#warn "basepath: $basepath";
$basepath =~ s#^(.*?)(/base/)$#$1/nocache$2# if ($params{nocache});
$hook .= "<base href='". $basepath . $relpath .
"' xmlns='http://www.w3.org/1999/xhtml' />";
my $magic = $pagedata->magicString;
my $content = "";
while (<HTML>) {
s/$magic/$hook/;
$content .= $_;
}
my $contentTypeHeader;
my $mimetype = $pagedata->mimetype($params{curidx});
my $charset = $pagedata->charset($params{curidx});
if ($charset) {
$contentTypeHeader = qq{Content-type: $mimetype; charset="$charset"\n\n};
} else {
$contentTypeHeader = qq{Content-type: $mimetype\n\n};
}
#warn $contentTypeHeader; #XXXjrgm testing...
# N.B., these two cookie headers are obsolete, since I pass server info in
# JS now, to work around a bug in winEmbed with document.cookie. But
# since I _was_ sending two cookies as part of the test, I have to keep
# sending two cookies (at least for now, and it's not a bad thing to test)
#XXX other headers to test/use?
$gResponseNow = [gettimeofday]; # for logging
{ # turn on output autoflush, locally in this block
print "Set-Cookie: moztest_SomeRandomCookie1=somerandomstring\n";
print "Set-Cookie: moztest_SomeRandomCookie2=somerandomstring\n";
print $contentTypeHeader;
local $| = 1;
print $content;
}
return;
}
sub encodeHiResTime {
my $timeref = shift;
return unless ref($timeref);
return $$timeref[0] . "-" . $$timeref[1];
}
sub decodeHiResTime {
my $timestr = shift;
return [ split('-', $timestr) ];
}
sub elapsedMilliSeconds {
my ($r_time, $timestr) = @_;
return "NaN" unless $timestr;
my $delta = tv_interval( [ split('-', $timestr) ], $r_time );
$delta = int(($delta*1000) - $params{delay}); # adjust for delay (in msec)
return $delta;
}
sub updateDataBase {
connectToDataBase(); # (may already be cached)
updateMetaTable();
updateDataSetTable() unless $params{c_part} == -1; # the initial request
}
sub connectToDataBase {
# don't reconnect if already connected. (Other drivers provide this
# for free I think, but not this one).
if (!ref($dbh)) {
$dbh = DBI->connect("DBI:CSV:f_dir=./db", {RaiseError => 1, AutoCommit => 1})
|| die "Cannot connect: " . $DBI::errstr;
}
}
#
# Holds the individual page load data for this id.
#
# (Of course, this should really be a single table for all datasets, but
# that was becoming punitively slow with DBD::CSV. I could have moved to
# a "real" database, but I didn't want to make that a requirement for
# installing this on another server and using this test (e.g., install a
# few modules and you can run this; no sql installation/maintenance required).
# At some point though, I may switch to some sql db, but hopefully still allow
# this to be used with a simple flat file db. (Hmm, maybe I should try a *dbm
# as a compromise (disk based but indexed)).
#
sub createDataSetTable {
my $table = "t" . $params{id};
return if -f "db/$table"; # don't create it if it exists
logMessage("createDataSetTable:\tdb/$table");
connectToDataBase(); # cached
my ($sth, $sql);
$sql = qq{
CREATE TABLE $table
(DATETIME CHAR(14),
ID CHAR(10),
INDEX INTEGER,
CUR_IDX INTEGER,
CUR_CYC INTEGER,
C_PART INTEGER,
S_INTVL INTEGER,
C_INTVL INTEGER,
CONTENT CHAR(128)
)
};
$sth = $dbh->prepare($sql);
$sth->execute();
$sth->finish();
return 1;
}
#
# holds the information about all test runs
#
sub createMetaTable {
my $table = shift;
return if -f "db/$table"; # don't create it if it exists
logMessage("createMetaTable:\tdb/$table");
my ($sth, $sql);
$sql = qq{
CREATE TABLE $table
(DATETIME CHAR(14),
LASTPING CHAR(14),
ID CHAR(8),
INDEX INTEGER,
CUR_IDX INTEGER,
CUR_CYC INTEGER,
CUR_CONTENT CHAR(128),
STATE INTEGER,
BLESSED INTEGER,
MAXCYC INTEGER,
MAXIDX INTEGER,
REPLACE INTEGER,
NOCACHE INTEGER,
DELAY INTEGER,
REMOTE_USER CHAR(16),
HTTP_USER_AGENT CHAR(128),
REMOTE_ADDR CHAR(15),
USER_EMAIL CHAR(32),
USER_COMMENT CHAR(256)
)
};
$sth = $dbh->prepare($sql);
$sth->execute();
$sth->finish();
warn 'created meta table';
return 1;
}
sub updateMetaTable {
connectToDataBase(); # if not already connected
my $table = "tMetaTable";
createMetaTable($table); # just returns if already created
my ($sth, $sql);
$sql = qq{
SELECT INDEX, MAXCYC, MAXIDX, REPLACE, NOCACHE,
DELAY, REMOTE_USER, HTTP_USER_AGENT, REMOTE_ADDR
FROM $table
WHERE ID = '$params{id}'
};
$sth = $dbh->prepare($sql);
$sth->execute();
my @dataset = ();
while (my @data = $sth->fetchrow_array()) {
push @dataset, {index => shift @data,
maxcyc => shift @data,
maxidx => shift @data,
replace => shift @data,
nocache => shift @data,
delay => shift @data,
remote_user => shift @data,
http_user_agent => shift @data,
remote_addr => shift @data
};
}
$sth->finish();
warn "More than one ID: $params{id} ??" if scalar(@dataset) > 1;
if (scalar(@dataset) == 0) {
# this is a new dataset and id
initMetaTableRecord($table);
return;
}
#XXX need to check that values are sane, and not update if they don't
# match certain params. This should not happen in a normal test run.
# However, if a test url was bookmarked or in history, I might get bogus
# data collected after the fact. But I have a stale date set on the URL,
# so that is good enough for now.
# my $ref = shift @dataset; # check some $ref->{foo}
$sql = qq{
UPDATE $table
SET LASTPING = ?,
INDEX = ?,
CUR_IDX = ?,
CUR_CYC = ?,
CUR_CONTENT = ?,
STATE = ?
WHERE ID = '$params{id}'
};
$sth = $dbh->prepare($sql);
$sth->execute($gStartNowStr,
$params{index}-1, # (index-1) is complete; (index) in progress
($params{curidx}-1) % $pagedata->length,
$params{curcyc},
$params{content},
'OPEN'
);
$sth->finish();
}
sub markTestAsComplete {
connectToDataBase(); # if not already connected
my $table = "tMetaTable";
createMetaTable($table); # just returns if already created
my ($sth, $sql);
#XXX should probably check if this ID exists first
$sql = qq{
UPDATE $table
SET STATE = "COMPLETE"
WHERE ID = '$params{id}'
};
$sth = $dbh->prepare($sql);
$sth->execute();
$sth->finish();
}
sub initMetaTableRecord {
# we know this record doesn't exist, so put in the initial values
my $table = shift;
my ($sth, $sql);
$sql = qq{
INSERT INTO $table
(DATETIME,
LASTPING,
ID,
INDEX,
CUR_IDX,
CUR_CYC,
CUR_CONTENT,
STATE,
BLESSED,
MAXCYC,
MAXIDX,
REPLACE,
NOCACHE,
DELAY,
REMOTE_USER,
HTTP_USER_AGENT,
REMOTE_ADDR,
USER_EMAIL,
USER_COMMENT
)
VALUES (?,?,?,?,
?,?,?,?,
?,?,?,?,
?,?,?,?,
?,?,?)
};
$sth = $dbh->prepare($sql);
$sth->execute($gStartNowStr,
$gStartNowStr,
$params{id},
$params{index}-1,
($params{curidx}-1) % $pagedata->length,
$params{curcyc},
$params{content},
"INIT",
0,
$params{maxcyc},
$params{maxidx},
$params{replace},
$params{nocache},
$params{delay},
$cgi->var("REMOTE_USER"),
$cgi->var("HTTP_USER_AGENT"),
$cgi->var("REMOTE_ADDR"),
"",
""
);
$sth->finish();
}
sub updateDataSetTable {
my $table = "t" . $params{id};
my ($sth, $sql);
$sql = qq{
INSERT INTO $table
(DATETIME,
ID,
INDEX,
CUR_IDX,
CUR_CYC,
C_PART,
S_INTVL,
C_INTVL,
CONTENT
)
VALUES (?,?,?,?,
?,?,?,?,?)
};
my $s_intvl = elapsedMilliSeconds( $gStartNow, $params{s_ts} );
$sth = $dbh->prepare($sql);
$sth->execute($gStartNowStr,
$params{id},
$params{index}-1,
($params{curidx}-1) % $pagedata->length,
$params{curcyc},
$params{c_part},
$s_intvl,
$params{c_intvl},
$req->param('content'),
);
$sth->finish();
}
sub outputForm {
my @prog = split('/', $0); my $prog = $prog[$#prog];
print "Content-type: text/html\n\n";
my $bgcolor = $ENV{SERVER_PORT} == 443 ? '#eebb66' : '#ffffff';
print <<"ENDOFHTML";
<html>
<head>
<title>Page Loading Times Test</title>
</head>
<body bgcolor="$bgcolor">
<h3>Page Loading Times Test</h3>
<p>Questions: <a href="mailto:jrgm\@netscape.com">John Morrison</a>
ENDOFHTML
print "&nbsp;&nbsp;-&nbsp;&nbsp;";
my $script = $cgi->var("SCRIPT_NAME");
my $server = $cgi->var("SERVER_NAME");
# pick the "other" protocol (i.e., test is inverted)
my $proto = $ENV{SERVER_PORT} == 443 ? 'http://' : 'https://';
my $other = $proto . $server . $script;
if ($ENV{SERVER_PORT} == 443) {
print "[&nbsp;<a href='$other'>With no SSL</a>&nbsp;|&nbsp;<b>With SSL</b>&nbsp;]<br>";
} else {
print "[&nbsp;<b>With no SSL</b>&nbsp;|&nbsp;<a href='$other'>With SSL</a>&nbsp;]<br>";
}
print <<"ENDOFHTML";
<form method="get" action="$prog" >
<table border="1" cellpadding="5" cellspacing="2">
<tr>
<td valign="top">
Page-load to Page-load Delay (msec):<br>
(Use 1000. Be nice.)
</td>
<td valign="top">
<select name="delay">
<option value="0">0
<option value="500">500
<option selected value="1000">1000
<option value="2000">2000
<option value="3000">3000
<option value="4000">4000
<option value="5000">5000
</select>
</td>
</tr>
<tr>
<td valign="top">
Number of test cycles to run:<br>
<br>
</td>
<td valign="top">
<select name="maxcyc">
<option value="0">1
<option value="1">2
<option value="2">3
<option value="3">4
<option value="4" selected>5
<option value="5">6
<option value="6">7
</select>
</td>
</tr>
<tr>
<td valign="top">
How long to wait before cancelling (msec):<br>
(Don't change this unless on a very slow link, or very slow machine.)
</td>
<td valign="top">
<select name="timeout">
<option value="15000">15000
<option selected value="30000">30000
<option value="45000">45000
<option value="60000">60000
<option value="90000">90000
</select>
</td>
</tr>
<tr>
<td valign="top">
<input type="reset" value="reset">
</td>
<td valign="top">
<input type="submit" value="submit">
</td>
</tr>
</table>
<hr>
<p>
You can visit the content that will be loaded, minus the embedded
javascript, by clicking on any of the links below.
</p>
<table border="1" cellpadding="5" cellspacing="2">
ENDOFHTML
my $i;
print "<tr>\n";
my $base = $pagedata->httpbase;
$base =~ s/^http:/https:/i
if $ENV{SERVER_PORT} == 443;
for ($i=0; $i<$pagedata->length; $i++) {
print "<td nowrap><a href='", $base, $pagedata->url($i), "'>";
print $pagedata->name($i);
print "</a>\n";
print "</tr><tr>\n" if (($i+1)%4 == 0);
}
print "</tr>" if (($i+1)%4 != 0);
print "</table></form></body></html>\n";
return;
}


tools/page-loader/report.pl
@@ -1,78 +0,0 @@
#!/usr/bin/perl
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
use CGI::Carp qw(fatalsToBrowser);
use CGI::Request;
use URLTimingDataSet;
use strict;
my $request = new CGI::Request;
my $id = $request->param('id'); #XXX need to check for valid parameter id
print "Content-type: text/html\n\n";
print "<p>See Notes at the bottom of this page for some details.</p>\n";
print "<pre>\n";
my $rs = URLTimingDataSet->new($id);
print "Test id: $id<br>Avg. Median : <b>", $rs->{avgmedian},
"</b> msec\t\tMinimum : ", $rs->{minimum}, " msec\n";
print "Average : ", $rs->{average},
" msec\t\tMaximum : ", $rs->{maximum}, " msec</pre>\n\n\n";
#XXX print more info (test id, ua, start time, user, IP, etc.)
# draw the chart sorted
# XXX enable this line to draw a chart, sorted by time. However, in order
# to draw the chart, you will need to have installed the 'gd' drawing library,
# and the GD and GD::Graph Perl modules.
###print "\n<p><img src='graph.pl?id=", $id, "' height='720' width='800'></p><br>\n";
print "<hr><pre>\nIDX PATH AVG MED MAX MIN TIMES ...\n";
if ($request->param('sort')) {
print $rs->as_string_sorted();
} else {
print $rs->as_string();
}
print "</pre>\n";
printEndNotes();
exit;
sub printEndNotes {
print <<"EndOfNotes";
<hr>
<p>
<ol>
<li>Times are in milliseconds.
<li>AVG, MED, MAX and MIN are the average, median, maximum and
minimum of the (non-NaN) test results for a given page.
<li>If a page fails to fire the onload event within 30 seconds,
the test for that page is "aborted": a different JS function kicks in,
cleans up, and reports the time as "NaN". (The rest of the pages in
the series should still be loaded normally after this).
<li>The value for AVG reported for 'All Pages' is the average of
the averages for all the pages loaded.
<li>The values for MAX and MIN reported for 'All Pages' are the
overall maximum and minimum for all pages loaded (keeping in mind that
a load that never finishes is recorded as "NaN".)
<li>The value for MED reported for 'All Pages' is the _average_ of
the medians for all the pages loaded (i.e., it is not the median of
the medians).
</ol>
</p>
EndOfNotes
}


tools/page-loader/urllist.txt
@@ -1,65 +0,0 @@
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Config file for page loading test
#
# HTTPBASE: is the URL to the root of the content pages
# FILEBASE: is the file path to the same location (I need both)
#
# Remaining lines are the names of top level directories under FILEBASE
# which contain the content files, followed by an optional filename in
# that directory (index.html is assumed if no filename given), and then
# followed by an optional 'charset' value to ship in the 'Content-type'
# header. [Note: if you want to set the charset, then you must also
# explicitly set the filename and mime type fields].
#
# Warning: you don't want to be casually changing the set of URLs that you are
# testing against, if you want to be able to make any reasonable comparison over
# time. And don't change this file while a test is in progress, as it will
# completely scramble the results for that test.
HTTPBASE: http://somehost.somedomain.sometld/content/base/
FILEBASE: /var/www/html/content/base/
home.netscape.com index.html # optionally specify a filename
my.netscape.com index.html text/html iso-8859-1 # optionally specify a filename, mime type and charset
www.aol.com index.html text/html # optionally specify a filename and mime type
www.mapquest.com
www.moviefone.com
www.digitalcity.com
www.iplanet.com
web.icq.com
www.compuserve.com
www.msnbc.com
www.yahoo.com
bugzilla.mozilla.org
www.msn.com
slashdot.org
www.nytimes.com
www.nytimes.com_Table
www.w3.org_DOML2Core
lxr.mozilla.org
espn.go.com
www.voodooextreme.com
www.wired.com
hotwired.lycos.com
www.ebay.com
www.apple.com
www.amazon.com
www.altavista.com
www.zdnet.com_Gamespot.com
www.spinner.com
www.microsoft.com
www.time.com
www.travelocity.com
www.expedia.com
www.quicken.com
www.zdnet.com
www.excite.com
www.google.com
www.tomshardware.com
www.cnn.com
news.cnet.com
www.sun.com