OK, here's the bit that does the measuring. I'm not showing the CGI script that does the display — I'm too embarrassed, and I don't have the time to make it suitable for public consumption. I do have a little bit of hubris (and laziness in spades) :-)
But if you *really* want it, leave your email address.
perlcgi.
#!/usr/local/bin/perl -w
# Written as a quick 'n' dirty hack - so here it is, warts and all.
# Retrieves a hash of sites and times how long each page takes to
# download. Writes the timing for each site to a tab-delimited file/stdout.
use strict;
use LWP::Simple;                  # I know, better to use LWP::UserAgent
use Time::HiRes qw(gettimeofday); # Accuracy overkill, maybe?

# One timestamp per run: when this batch of data was collected.
my $thisrun = localtime(time);

# Amend the next line as required, or just leave it out.
#open(OPFILE, ">>/var/logs/whatever") || die "Could not open /var/logs/whatever: $!";

# Hash of sites to be timed: label => URL.
my %url = (
    pubmed   => "http://www.ncbi.nlm.nih.gov",
    proquest => "http://proquest.umi.com",
    JSTOR    => "http://www.jstor.ac.uk/jstor/",
    AMAZONUK => "http://www.amazon.co.uk/",
    AMAZONUS => "http://www.amazon.com/",
    SPRR     => "http://sprr.library.nuigalway.ie",
    SCIDIR   => "http://www.sciencedirect.com/",
);
# Fetch a single URL and report how long the download took.
# Takes the URL as its only argument; prints one tab-delimited record
# (site, elapsed seconds, run timestamp) to STDOUT. Uses the run-wide
# $thisrun timestamp declared at the top of the script.
sub process_url {
    my $url = shift;

    my $start   = gettimeofday;
    my $page    = get($url);               # LWP::Simple::get returns undef on failure
    my $elapsed = gettimeofday - $start;

    # Flag failed fetches instead of silently reporting a bogus timing
    # (the original ignored $page entirely, so a dead site looked fast).
    if ( !defined $page ) {
        print "Site:$url\tFETCH FAILED after:$elapsed\t Run on $thisrun\n";
        return;
    }

    print "Site:$url\tTime Taken:$elapsed\t Run on $thisrun\n";
    # print OPFILE "$url\t$elapsed \t$thisrun\n";
}
# Time every site in a stable (alphabetical-by-label) order.
# Plain process_url(...) call: the legacy &process_url form bypasses
# prototypes and is a well-known Perl anti-pattern.
foreach my $key (sort keys %url) {
    process_url( $url{$key} );
}
# That's it!