#! /usr/bin/env perl
# Basic script to give us the status of a test suite.
# Read the testspec.xml file for the test suite, and print
# out the TestStatus for each test.
use strict;
use warnings;
use Cwd qw(getcwd abs_path);
use File::Basename;
use File::stat;
use Data::Dumper;
use Getopt::Long;
use XML::LibXML;
use IO::File;

my $cwd;
my $testroot;
my $cimeroot;
my %opts;
my @testspecxmls;
my $banner = '-' x 120;

# Open the testspec.xml, get the cimeroot, and decide which testspec
# file(s) in the current directory to report on.
sub init {
    $cwd      = getcwd;
    $testroot = getcwd;

    my @testspecs;
    # Use the single test spec passed in on the command line; otherwise
    # find all the test specs in the current directory.
    if (defined $opts{'testspec'}) {
        push(@testspecs, $opts{'testspec'});
    }
    else {
        # find all the testspec xml files in the current directory
        opendir(my $curdir, ".") or die "can't open current working directory!\n";
        @testspecs = grep { /^testspec.*xml$/ } readdir($curdir);
        closedir($curdir);
    }

    # die if we don't find any spec files
    if (!@testspecs) {
        die "no testspec xml files found! Aborting\n";
    }

    # get the cimeroot
    my $parser = XML::LibXML->new(no_blanks => 1);
    my $xml    = $parser->parse_file($testspecs[0]);
    my @node   = $xml->findnodes("//cimeroot");
    $cimeroot = $node[0]->textContent();

    if (!defined $opts{'expectedfails'} and -e "$cimeroot/ExpectedTestFails.xml") {
        $opts{'expectedfails'} = $cimeroot . "/scripts/Testing/TestListxml/ExpectedTestFails.xml";
    }

    if (defined $opts{'all'}) {
        # report on every testspec file found
        foreach my $testspec (@testspecs) {
            push(@testspecxmls, $testspec);
        }
    }
    else {
        # report only on the most recently modified testspec file
        my $modtime = 0;
        my $latestspecname;
        foreach my $testspec (@testspecs) {
            open my $SPEC, "<", "./$testspec" or die "couldn't open testspec file";
            my $fstat       = stat($SPEC);
            my $specmodtime = $fstat->mtime;
            close $SPEC;
            if ($specmodtime > $modtime) {
                $modtime        = $specmodtime;
                $latestspecname = $testspec;
            }
        }
        push(@testspecxmls, $latestspecname);
    }

    chdir $cwd;
    return @testspecxmls;
}

# Find the testspec file for the suite we are comparing against.
sub getCompareTestSpec {
    my $comparespec = glob("$opts{'comparedir'}/testspec.$opts{'compareid'}.*.xml");
    #print "Compare test spec: $comparespec\n";
    return $comparespec;
}

# Take the testspec.$testid.$machine.xml file, parse it,
# and get the names of the case directories.
sub getCaseDirsFromSpec {
    my $testspec = shift;

    my @casedirs;
    #print "Casedirs: \n";
    #map {print "$_\n"} @casedirs;
    my $parser = XML::LibXML->new(no_blanks => 1);
    my $xml    = $parser->parse_file($testspec);
    my @tests  = $xml->findnodes("//test");
    foreach my $test (@tests) {
        my $case = $test->getAttribute('case');
        push(@casedirs, $case);
    }
    return @casedirs;
}
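# Illustrative sketch only -- this sub is not called anywhere in the script.
# It shows the minimal testspec shape that init() and getCaseDirsFromSpec()
# assume, based solely on the XPath queries above: a <cimeroot> element and
# one <test> element per case, with the case directory name in the "case"
# attribute. The sample path and case names below are invented.
sub exampleTestspecShape {
    my $sample = <<'XML';
<testspec>
  <cimeroot>/path/to/cime</cimeroot>
  <test case="SMS.f19_g16.A.mymachine_mycompiler.sometestid"/>
  <test case="ERS.f19_g16.A.mymachine_mycompiler.sometestid"/>
</testspec>
XML
    my $doc            = XML::LibXML->new(no_blanks => 1)->parse_string($sample);
    my ($cimerootnode) = $doc->findnodes("//cimeroot");
    my @cases          = map { $_->getAttribute('case') } $doc->findnodes("//test");
    return ($cimerootnode->textContent(), @cases);
}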
# Given an array of case directories, get the test status for
# all the testcase directories found.
sub getTestStatus {
    my $testspec = shift;
    my $testdir  = shift;

    # If an expected-fails list was supplied, parse it once and keep its
    # <entry> nodes so each TestStatus line can be checked against them.
    my %xfails;
    my @xfailnodes;
    if (defined $opts{'expectedfails'}) {
        my $expectedfails = abs_path($opts{'expectedfails'});
        my $testlist      = $expectedfails;
        my $parser        = XML::LibXML->new(no_blanks => 1);
        my $testxml       = $parser->parse_file($testlist);
        @xfailnodes = $testxml->findnodes("//entry");
        foreach my $xfailnode (@xfailnodes) {
            my @xfailchild_nodes = $xfailnode->childNodes();
        }
    }

    my @tests;
    my @casedirs = getCaseDirsFromSpec($testspec);

    foreach my $testcase (@casedirs) {
        # each case directory should contain a TestStatus file
        my $teststatusfile = "$testdir/$testcase/TestStatus";
        next if (! -e $teststatusfile);

        chdir "$testdir/$testcase";
        my $testbase = `./xmlquery CASEBASEID -value`;
        chdir $cwd;

        open my $STATUS, "<", $teststatusfile or die $!;
        my @lines = <$STATUS>;
        close $STATUS;

        my %testhash;
        $testhash{'casedirectory'} = $testcase;
        $testhash{'fullpath'}      = "$testdir/$testcase";
        $testhash{'status'}        = $lines[0];
        my @teststatus_line = (split(/\s+/, $testhash{'status'}));
        my $teststatus      = "$teststatus_line[0]";
        #print "$teststatus\n";

        if (defined $opts{'expectedfails'}) {
            # is there an expected fail that matches this test base name?
            my $xfailbugz = "";
            my $xfailnode;
            for my $n (0 .. $#lines) {
                my $status_line = $lines[$n];
                $status_line =~ m/(\w+\s)(.+$)/;
                my $status_info = $2;
                chomp($status_info);
                chomp($status_line);
                foreach $xfailnode (@xfailnodes) {
                    my $xfail_entry = $xfailnode->textContent();
                    chomp($xfail_entry);
                    my $bugz = $xfailnode->getAttribute("bugz");
                    $xfail_entry =~ m/(\w+\s)(.+$)/;
                    my $xfail_info = $2;
                    chomp($xfail_info);

                    if (   ($status_line =~ m/(^$xfail_entry)(\..*$)/)
                        || ($xfail_info eq $status_info)
                        || ($status_line =~ m/$xfail_entry/)) {
                        # unexpected passes
                        if ($status_line =~ m/DONE/) {
                            $status_line = 'U' . $status_line;
                        }
                        if ($status_line =~ m/PASS/) {
                            $status_line = 'U' . $status_line;
                        }
                        # expected fails
                        if ($status_line =~ m/FAIL/) {
                            if ($bugz) {
                                $status_line = 'KTF ' . $status_line . "(bugzilla $bugz) \n";
                            }
                            else {
                                $status_line = 'KTF ' . $status_line;
                            }
                        }
                        if ($status_line =~ m/RUN/) {
                            if ($bugz) {
                                $status_line = 'KTF ' . $status_line . "(bugzilla $bugz) \n";
                            }
                            else {
                                $status_line = 'KTF ' . $status_line;
                            }
                        }
                        $lines[$n] = $status_line;
                        last;
                    }
                }
            }
        }

        $testhash{'status'} = $lines[0];
        shift @lines;
        @{$testhash{'rawstatus'}} = @lines;
        push(@tests, \%testhash);
    }

    chdir $cwd;
    return @tests;
}
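# Illustrative sketch only -- this sub is not called anywhere in the script.
# It shows the expected-fails XML shape that getTestStatus() assumes, based
# solely on the queries above: <entry> nodes whose text pairs a status
# keyword with a test name, plus an optional "bugz" attribute. When an entry
# matches a TestStatus line, FAIL/RUN lines get a 'KTF ' prefix and
# DONE/PASS lines get a 'U' prefix. The root element name and the sample
# entries below are invented; only <entry> and "bugz" come from the code.
sub exampleExpectedFailsShape {
    my $sample = <<'XML';
<expectedFails>
  <entry bugz="1234">FAIL ERS.f19_g16.A.mymachine_mycompiler</entry>
  <entry>FAIL SMS.f19_g16.A.mymachine_mycompiler</entry>
</expectedFails>
XML
    my $doc = XML::LibXML->new(no_blanks => 1)->parse_string($sample);
    return map { [ $_->textContent(), $_->getAttribute("bugz") ] }
           $doc->findnodes("//entry");
}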
sub printStatus {
    my ($tests) = @_;

    foreach my $test (@$tests) {
        print "$$test{'status'} ";
        foreach (@{$$test{'rawstatus'}}) {
            print " $_";
        }
        print "\n";
    }
}

# Compare the performance of two arbitrary test suites.
# We will only compare performance for tests that PASSed in
# both suites.
sub comparePerformance {
    my $currentTestRoot = shift;
    my $compareTestRoot = shift;

    my $currentTestSpec = getCwdTestSpec($currentTestRoot);
    my $compareTestSpec = getCompareTestSpec($compareTestRoot);

    my @allCurrentTests = getTestStatus($currentTestSpec, $currentTestRoot);
    my @allCompareTests = getTestStatus($compareTestSpec, $compareTestRoot);

    # We only want to compare passing tests.
    my @passingCurrentTests = grep { $$_{'status'} eq 'PASS' } @allCurrentTests;
    my @passingCompareTests = grep { $$_{'status'} eq 'PASS' } @allCompareTests;

    foreach my $current (@passingCurrentTests) {
        getTimingForTest($current);
    }
    foreach my $compare (@passingCompareTests) {
        getTimingForTest($compare);
    }

    # Print the report
    print "$banner\n";
    print "Performance Comparison Report\n";
    print "$banner\n";
    printf "%-40s %-20s\n", "Current Test root:", "$currentTestRoot";
    printf "%-40s %-20s\n", "Current Passing Tests: ", scalar @passingCurrentTests;
    print "$banner\n";
    printf "%-40s %-20s\n", "Comparing against this test root:", "$compareTestRoot";
    printf "%-40s %-20s\n", "Passing Comparison Tests: ", scalar @passingCompareTests;

    foreach my $currenttest (@passingCurrentTests) {
        my @compares = grep { $$_{'test'} eq $$currenttest{'test'} } @passingCompareTests;
        next if !@compares;
        my $comparetest = $compares[0];
        printf "$banner\n";
        printf "%-70s %-20s\n", $$currenttest{'casedirectory'}, $$comparetest{'casedirectory'};
        #printf "$banner\n";
        printf "%-70s %-20s\n", "Throughput:", "Throughput:";
        printf "%-70s %-20s\n", $$currenttest{'throughput'}, $$comparetest{'throughput'};
    }
}

# Read the timing files for a test case and record the model cost and
# throughput in the test hash.
sub getTimingForTest {
    my $test = shift;

    my $timingdir = "$$test{'fullpath'}/timing";
    opendir(DIR, $timingdir) or warn "cannot open $timingdir, $!";
    # only look at timing files whose names end in a digit
    my @timingfiles = grep(/[0-9]$/, readdir(DIR));
    closedir(DIR);

    foreach my $timingfile (@timingfiles) {
        $timingfile = "$timingdir/$timingfile";
        open my $tfile, "<", $timingfile or warn "cannot open the timing file $timingfile, $!";
        my @tlines = <$tfile>;
        close $tfile;

        my $modelcost = (grep(/Model Cost/i, @tlines))[0];
        my $modeltput = (grep(/Model Throughput/i, @tlines))[0];
        my $init      = (grep(/Init Time/, @tlines))[0];
        my $run       = (grep(/^\s+Run Time/, @tlines))[0];
        my $final     = (grep(/Final Time/, @tlines))[0];
        chomp $init;
        chomp $run;
        chomp $final;

        # the elapsed seconds are taken from the fifth whitespace-separated
        # field of the Init/Run/Final Time lines
        my $inittime  = (split(/\s+/, $init))[4];
        my $runtime   = (split(/\s+/, $run))[4];
        my $finaltime = (split(/\s+/, $final))[4];
        #print "init time: $inittime\n";
        #print "run time: $runtime\n";
        #print "final time: $finaltime\n";

        # total wall time in seconds, converted to hours
        my $totaltime   = $inittime + $runtime + $finaltime;
        my $timeinhours = $totaltime / 3600;

        # cost and throughput are the third field of the Model Cost and
        # Model Throughput lines
        $modelcost =~ s/^\s+//;
        $modeltput =~ s/^\s+//;
        my @mc   = split(/\s+/, $modelcost);
        my @mt   = split(/\s+/, $modeltput);
        my $cost = $mc[2];
        my $tput = $mt[2];

        # only used by the commented-out diagnostics below
        my $actualcost = $cost * $timeinhours;
        #print "actual cost for $timingdir: $actualcost\n";
        #print "throughput for $timingdir: $tput\n";

        $$test{'cost'}       = $cost;
        $$test{'throughput'} = $tput;
    }
}

# Sum the model cost over a set of tests and return a printable report.
sub getTotalCost {
    my $tests = shift;

    my $totalcost = 0.0;
    my $costmsg   = "";
    $costmsg .= "$banner\n";
    $costmsg .= "Test suite at $opts{'testroot'}\n";
    $costmsg .= "Passing tests included in this sum: " . scalar @$tests . "\n";
    foreach my $test (@$tests) {
        getTimingForTest($test);
        $totalcost += $$test{'cost'};
    }
    $costmsg .= "Total Cost for this suite: $totalcost\n";
    $costmsg .= "$banner\n";
    return $costmsg;
}

sub getOpts {
    GetOptions(
        "cost|c"          => \$opts{'cost'},
        "testspec=s"      => \$opts{'testspec'},
        "compareperf|p"   => \$opts{'compareperformance'},
        "testroot=s"      => \$opts{'testroot'},
        "testid=s"        => \$opts{'testid'},
        "comparedir=s"    => \$opts{'comparedir'},
        "compareid=s"     => \$opts{'compareid'},
        "expectedfails=s" => \$opts{'expectedfails'},
        "all"             => \$opts{'all'},
        "help|h"          => \$opts{'help'},
    );
}

sub usage() {
    my $usage = <