contrib: remove nightly tarball scripts

The nightly tarball scripts have been moved to the ompi-scripts
repo (and rewritten), so they are no longer needed in the ompi
repo.  Clean up some bits to make life a bit less confusing.

Signed-off-by: Brian Barrett <bbarrett@amazon.com>
This commit is contained in:
Brian Barrett 2017-08-18 10:24:04 -07:00
parent d9b2c94d4a
commit 2ac8f75e0c
8 changed files with 0 additions and 1435 deletions

View file

@@ -1,167 +0,0 @@
#!/usr/bin/env perl
use warnings;
use strict;
use Getopt::Long;
use File::Temp qw/ tempfile tempdir /;
use File::Basename;
my $coverity_project = "hwloc";
# Coverity changes this URL periodically
my $coverity_tool_url = "https://scan.coverity.com/download/cxx/linux64";
my $filename_arg;
my $coverity_token_arg;
my $dry_run_arg = 0;
my $verbose_arg = 0;
my $debug_arg = 0;
my $logfile_dir_arg;
my $configure_args = "";
my $make_args = "-j 32";
my $help_arg = 0;
&Getopt::Long::Configure("bundling");
my $ok = Getopt::Long::GetOptions("filename=s" => \$filename_arg,
"coverity-token=s" => \$coverity_token_arg,
"logfile-dir=s" => \$logfile_dir_arg,
"configure-args=s" => \$configure_args,
"make-args=s" => \$make_args,
"dry-run!" => \$dry_run_arg,
"verbose!" => \$verbose_arg,
"debug!" => \$debug_arg,
"help|h" => \$help_arg);
$ok = 0
if (!defined($filename_arg));
$ok = 0
if (!defined($coverity_token_arg));
if (!$ok || $help_arg) {
print "Usage: $0 --filename=FILENAME --coverity-token=TOKEN [--dry-run] [--verbose] [--help]\n";
exit($ok);
}
die "Cannot read $filename_arg"
if (! -r $filename_arg);
$verbose_arg = 1
if ($debug_arg);
######################################################################
sub verbose {
print @_
if ($verbose_arg);
}
# run a command and save the stdout / stderr
sub safe_system {
my $allowed_to_fail = shift;
my $cmd = shift;
my $stdout_file = shift;
# Redirect stdout to a file if requested; otherwise discard it unless debugging
if (defined($stdout_file)) {
$stdout_file = "$logfile_dir_arg/$stdout_file";
unlink($stdout_file);
$cmd .= " >$stdout_file";
} elsif (!$debug_arg) {
$cmd .= " >/dev/null";
}
$cmd .= " 2>&1";
my $rc = system($cmd);
if (0 != $rc && !$allowed_to_fail) {
# If we die/fail, make sure to change out of the temp tree so
# that it can be removed upon exit.
chdir("/");
print "Command $cmd failed: exit status $rc\n";
if (defined($stdout_file) && -f $stdout_file) {
print "Last command output:\n";
system("cat $stdout_file");
}
die "Cannot continue";
}
system("cat $stdout_file")
if ($debug_arg && defined($stdout_file) && -f $stdout_file);
}
######################################################################
# Make an area to work
my $dir = tempdir(CLEANUP => 0);
chdir($dir);
verbose "*** Working in $dir\n";
######################################################################
# Get the coverity tool, put it in our path
my $cdir = "$ENV{HOME}/coverity";
safe_system(0, "mkdir $cdir")
if (! -d $cdir);
# Optimization: the tool is pretty large. If our local copy is less
# than a day old, just use that without re-downloading.
my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
$atime,$mtime,$ctime,$blksize,$blocks) =
stat("$cdir/coverity_tool.tgz");
my $now = time();
if (!defined($mtime) || $mtime < $now - 24*60*60) {
verbose "*** Downloading new copy of the coverity tool\n";
safe_system(0, "wget $coverity_tool_url --post-data \"token=$coverity_token_arg&project=$coverity_project\" -O coverity_tool.tgz");
safe_system(0, "cp coverity_tool.tgz $cdir");
}
verbose "*** Expanding coverity tool tarball\n";
safe_system(0, "tar xf $cdir/coverity_tool.tgz");
opendir(my $dh, ".") ||
die "Can't opendir .";
my @files = grep { /^cov/ && -d "./$_" } readdir($dh);
closedir($dh);
my $cov_dir = "$dir/$files[0]/bin";
$ENV{PATH} = "$cov_dir:$ENV{PATH}";
######################################################################
# Expand the HWLOC tarball, build it
verbose "*** Extracting HWLOC tarball\n";
safe_system(0, "tar xf $filename_arg");
my $tarball_filename = basename($filename_arg);
$tarball_filename =~ m/^hwloc-(.+)\.tar.+$/;
my $hwloc_ver = $1;
chdir("hwloc-$hwloc_ver");
verbose "*** Configuring HWLOC tarball\n";
safe_system(0, "./configure $configure_args", "configure");
verbose "*** Building HWLOC tarball\n";
safe_system(0, "cov-build --dir cov-int make $make_args", "cov-build");
# Tar up the Coverity results
verbose "*** Tarring up results\n";
safe_system(0, "tar jcf $hwloc_ver-analyzed.tar.bz2 cov-int");
# If not dry-run, submit to Coverity
if ($dry_run_arg) {
verbose "*** Would have submitted, but this is a dry run\n";
} else {
verbose "*** Submitting results\n";
safe_system(0, "curl --form token=$coverity_token_arg " .
"--form email=brice.goglin\@labri.fr " .
"--form file=\@$hwloc_ver-analyzed.tar.bz2 " .
"--form version=$hwloc_ver " .
"--form description=nightly-master " .
"https://scan.coverity.com/builds?project=hwloc",
"coverity-submit");
}
verbose("*** All done\n");
# Chdir out of the tempdir so that it can be removed
chdir("/");
exit(0);

View file

@@ -1,186 +0,0 @@
#!/bin/sh
#####
#
# Configuration options
#
#####
# e-mail address to send results to
results_addr=hwloc-devel@lists.open-mpi.org
#results_addr=rhc@open-mpi.org
# git repository URL
code_uri=https://github.com/open-mpi/hwloc.git
raw_uri=https://raw.github.com/open-mpi/hwloc
# where to put built tarballs
outputroot=$HOME/hwloc/nightly
# Target where to scp the final tarballs
output_ssh_target=ompiteam@192.185.39.252
# where to find the build script
script_uri=contrib/nightly/make_snapshot_tarball
# helper scripts dir
script_dir=$HOME/ompi/contrib/build-server
# Set this to any value for additional output; typically only when
# debugging
: ${debug:=}
# The tarballs to make
if [ $# -eq 0 ] ; then
# Branches v1.6 and earlier were not updated to build nightly
# snapshots from git, so only check v1.7 and later
branches="master v1.11"
else
branches=$@
fi
# Build root - scratch space
build_root=$HOME/hwloc/nightly-tarball-build-root
# Coverity stuff
coverity_token=`cat $HOME/coverity/hwloc-token.txt`
export PATH=$HOME_PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$HOME_PREFIX/lib:$LD_LIBRARY_PATH
#####
#
# Actually do stuff
#
#####
debug() {
if test -n "$debug"; then
echo "=== DEBUG: $*"
fi
}
run_command() {
debug "Running command: $*"
debug "Running in pwd: `pwd`"
if test -n "$debug"; then
eval $*
else
eval $* > /dev/null 2>&1
fi
if test $? -ne 0; then
echo "=== Command failed: $*"
fi
}
# load the modules configuration
. $MODULE_INIT
module use $AUTOTOOL_MODULE
# get our nightly build script
mkdir -p $build_root
cd $build_root
pending_coverity=$build_root/tarballs-to-run-through-coverity.txt
rm -f $pending_coverity
touch $pending_coverity
# Loop making them
module unload autotools
for branch in $branches; do
echo "=== Branch: $branch"
# Get the last tarball version that was made
prev_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
prev_snapshot_hash=`echo $prev_snapshot | cut -d- -f3`
echo "=== Previous snapshot: $prev_snapshot (hash: $prev_snapshot_hash)"
# Form a URL-specific script name
script=$branch-`basename $script_uri`
echo "=== Getting script from: $raw_uri"
run_command wget --quiet --no-check-certificate --tries=10 $raw_uri/$branch/$script_uri -O $script
if test ! $? -eq 0 ; then
echo "wget of hwloc nightly tarball create script failed."
if test -f $script ; then
echo "Using older version of $script for this run."
else
echo "No build script available. Aborting."
exit 1
fi
fi
chmod +x $script
module load "autotools/hwloc-$branch"
# module load "tex-live/hwloc-$branch"
echo "=== Running script..."
run_command ./$script \
$build_root/$branch \
$results_addr \
$outputroot/$branch \
$code_uri \
$branch
module unload autotools
echo "=== Done running script"
# Did the script generate a new tarball?  Compare only the hash of
# the previous tarball and the hash of the new tarball (the filename
# also contains the date/timestamp, which will always be different).
# If there is a new tarball, save it so that we can spawn the
# coverity checker on it afterwards.  Only do this for master (for
# now).
latest_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
latest_snapshot_hash=`echo $latest_snapshot | cut -d- -f3`
echo "=== Latest snapshot: $latest_snapshot (hash: $latest_snapshot_hash)"
if test "$prev_snapshot_hash" = "$latest_snapshot_hash"; then
echo "=== Hash has not changed; no need to upload/save the new tarball"
else
if test "$branch" = "master"; then
echo "=== Saving output for a Coverity run"
echo "$outputroot/$branch/hwloc-$latest_snapshot.tar.bz2" >> $pending_coverity
else
echo "=== NOT saving output for a Coverity run"
fi
echo "=== Posting tarball to open-mpi.org"
# tell the web server to cleanup old nightly tarballs
run_command ssh -p 2222 \
$output_ssh_target \
\"git/ompi/contrib/build-server/remove-old.pl 7 public_html/software/hwloc/nightly/$branch\"
# upload the new ones
run_command scp -P 2222 \
$outputroot/$branch/hwloc-$latest_snapshot.tar.* \
$output_ssh_target:public_html/software/hwloc/nightly/$branch/
run_command scp -P 2222 \
$outputroot/$branch/latest_snapshot.txt \
$output_ssh_target:public_html/software/hwloc/nightly/$branch/
# direct the web server to regenerate the checksums
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/software/hwloc/nightly/$branch \&\& md5sum hwloc\* \> md5sums.txt\"
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/software/hwloc/nightly/$branch \&\& sha1sum hwloc\* \> sha1sums.txt\"
fi
# Failed builds are not removed automatically.  But if a human
# forgets to come in here and clean up the old failed builds, they
# can accumulate over time.  So remove any failed builds that are
# more than a week old (remove-old.pl is given a 7-day limit).
run_command ${script_dir}/remove-old.pl 7 $build_root/$branch
done
# If we had any new snapshots to send to coverity, process them now
for tarball in `cat $pending_coverity`; do
run_command ${script_dir}/hwloc-nightly-coverity.pl \
--filename=$tarball \
--coverity-token=$coverity_token \
--verbose \
--logfile-dir=$HOME/coverity \
--make-args=\"-j8\"
done
rm -f $pending_coverity

View file

@@ -1,162 +0,0 @@
#!/usr/bin/env perl
use warnings;
use strict;
use Getopt::Long;
use File::Temp qw/ tempfile tempdir /;
use File::Basename;
my $coverity_project = "Open+MPI";
# Coverity changes this URL periodically
my $coverity_tool_url = "https://scan.coverity.com/download/cxx/linux64";
my $filename_arg;
my $coverity_token_arg;
my $dry_run_arg = 0;
my $verbose_arg = 0;
my $debug_arg = 0;
my $logfile_dir_arg = "/tmp";
my $configure_args = "";
my $make_args = "-j 32";
my $help_arg = 0;
&Getopt::Long::Configure("bundling");
my $ok = Getopt::Long::GetOptions("filename=s" => \$filename_arg,
"coverity-token=s" => \$coverity_token_arg,
"logfile-dir=s" => \$logfile_dir_arg,
"configure-args=s" => \$configure_args,
"make-args=s" => \$make_args,
"dry-run!" => \$dry_run_arg,
"verbose!" => \$verbose_arg,
"debug!" => \$debug_arg,
"help|h" => \$help_arg);
$ok = 0
if (!defined($filename_arg));
$ok = 0
if (!defined($coverity_token_arg));
if (!$ok || $help_arg) {
print "Usage: $0 --filename=FILENAME --coverity-token=TOKEN [--dry-run] [--verbose] [--help]\n";
exit($ok);
}
die "Cannot read $filename_arg"
if (! -r $filename_arg);
$verbose_arg = 1
if ($debug_arg);
######################################################################
sub verbose {
print @_
if ($verbose_arg);
}
# run a command and save the stdout / stderr
sub safe_system {
my $allowed_to_fail = shift;
my $cmd = shift;
my $stdout_file = shift;
# Redirect stdout to a file if requested; otherwise discard it unless debugging
if (defined($stdout_file)) {
$stdout_file = "$logfile_dir_arg/$stdout_file";
unlink($stdout_file);
$cmd .= " >$stdout_file";
} elsif (!$debug_arg) {
$cmd .= " >/dev/null";
}
$cmd .= " 2>&1";
my $rc = system($cmd);
if (0 != $rc && !$allowed_to_fail) {
# If we die/fail, make sure to change out of the temp tree so
# that it can be removed upon exit.
chdir("/");
die "Command $cmd failed: exit status $rc";
}
system("cat $stdout_file")
if ($debug_arg && defined($stdout_file) && -f $stdout_file);
}
######################################################################
# Make an area to work
my $dir = tempdir(CLEANUP => 1);
chdir($dir);
verbose "*** Working in $dir\n";
######################################################################
# Get the coverity tool, put it in our path
my $cdir = "$ENV{HOME}/coverity";
safe_system(0, "mkdir $cdir")
if (! -d $cdir);
# Optimization: the tool is pretty large. If our local copy is less
# than a day old, just use that without re-downloading.
my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
$atime,$mtime,$ctime,$blksize,$blocks) =
stat("$cdir/coverity_tool.tgz");
my $now = time();
if (!defined($mtime) || $mtime < $now - 24*60*60) {
verbose "*** Downloading new copy of the coverity tool\n";
safe_system(0, "wget $coverity_tool_url --post-data \"token=$coverity_token_arg&project=$coverity_project\" -O coverity_tool.tgz");
safe_system(0, "cp coverity_tool.tgz $cdir");
}
verbose "*** Expanding coverity tool tarball\n";
safe_system(0, "tar xf $cdir/coverity_tool.tgz");
opendir(my $dh, ".") ||
die "Can't opendir .";
my @files = grep { /^cov/ && -d "./$_" } readdir($dh);
closedir($dh);
my $cov_dir = "$dir/$files[0]/bin";
$ENV{PATH} = "$cov_dir:$ENV{PATH}";
######################################################################
# Expand the OMPI tarball, build it
verbose "*** Extracting OMPI tarball\n";
safe_system(0, "tar xf $filename_arg");
my $tarball_filename = basename($filename_arg);
$tarball_filename =~ m/^openmpi-(.+)\.tar.+$/;
my $ompi_ver = $1;
chdir("openmpi-$ompi_ver");
verbose "*** Configuring OMPI tarball\n";
safe_system(0, "./configure $configure_args", "configure");
verbose "*** Building OMPI tarball\n";
safe_system(0, "cov-build --dir cov-int make $make_args", "cov-build");
# Tar up the Coverity results
verbose "*** Tarring up results\n";
safe_system(0, "tar jcf $ompi_ver-analyzed.tar.bz2 cov-int");
# If not dry-run, submit to Coverity
if ($dry_run_arg) {
verbose "*** Would have submitted, but this is a dry run\n";
} else {
verbose "*** Submitting results\n";
safe_system(0, "curl --form token=$coverity_token_arg " .
"--form email=jsquyres\@cisco.com " .
"--form file=\@$ompi_ver-analyzed.tar.bz2 " .
"--form version=$ompi_ver " .
"--form description=nightly-master " .
"https://scan.coverity.com/builds?project=$coverity_project",
"coverity-submit");
}
verbose("*** All done\n");
# Chdir out of the tempdir so that it can be removed
chdir("/");
exit(0);

View file

@@ -1,192 +0,0 @@
#!/bin/sh
#####
#
# Configuration options
#
#####
# e-mail address to send results to
results_addr=testing@lists.open-mpi.org
#results_addr=rhc@open-mpi.org
# Set this to any value for additional output; typically only when
# debugging
: ${debug:=}
# git repository URIs
master_code_uri=https://github.com/open-mpi/ompi.git
master_raw_uri=https://raw.github.com/open-mpi/ompi
# where to put built tarballs - needs to be
# adjusted to match your site!
outputroot=$HOME/openmpi/nightly
# Target where to scp the final tarballs
output_ssh_target=ompiteam@192.185.39.252
# where to find the build script
script_uri=contrib/nightly/create_tarball.sh
# helper scripts dir
script_dir=$HOME/ompi/contrib/build-server
# The tarballs to make
if [ $# -eq 0 ] ; then
# We no longer check the 1.0 - 1.8 branches
branches="master v1.10 v2.x v2.0.x v3.x"
else
branches=$@
fi
# Build root - scratch space
build_root=$HOME/openmpi/nightly-tarball-build-root
# Coverity stuff
coverity_token=`cat $HOME/coverity/openmpi-token.txt`
coverity_configure_args="--enable-debug --enable-mpi-fortran --enable-mpi-cxx --enable-mpi-java --enable-oshmem --enable-oshmem-fortran --with-usnic --with-libfabric=/mnt/data/local-installs"
export PATH=$HOME_PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$HOME_PREFIX/lib:$LD_LIBRARY_PATH
#####
#
# Actually do stuff
#
#####
debug() {
if test -n "$debug"; then
echo "=== DEBUG: $*"
fi
}
run_command() {
debug "Running command: $*"
debug "Running in pwd: `pwd`"
if test -n "$debug"; then
eval $*
else
eval $* > /dev/null 2>&1
fi
if test $? -ne 0; then
echo "=== Command failed: $*"
fi
}
# load the modules configuration
. $MODULE_INIT
module use $AUTOTOOL_MODULE
# get our nightly build script
mkdir -p $build_root
cd $build_root
pending_coverity=$build_root/tarballs-to-run-through-coverity.txt
rm -f $pending_coverity
touch $pending_coverity
# Loop making the tarballs
module unload autotools
for branch in $branches; do
echo "=== Branch: $branch"
# Get the last tarball version that was made
prev_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
prev_snapshot_hash=`echo $prev_snapshot | cut -d- -f3`
echo "=== Previous snapshot: $prev_snapshot (hash: $prev_snapshot_hash)"
code_uri=$master_code_uri
raw_uri=$master_raw_uri
# Form a URL-specific script name
script=$branch-`basename $script_uri`
echo "=== Getting script from: $raw_uri"
run_command wget --quiet --no-check-certificate --tries=10 $raw_uri/$branch/$script_uri -O $script
if test ! $? -eq 0 ; then
echo "wget of OMPI nightly tarball create script failed."
if test -f $script ; then
echo "Using older version of $script for this run."
else
echo "No build script available. Aborting."
exit 1
fi
fi
chmod +x $script
module load "autotools/ompi-$branch"
echo "=== Running script..."
run_command eval ./$script \
$build_root/$branch \
$results_addr \
$outputroot/$branch \
$code_uri \
$branch
module unload autotools
echo "=== Done running script"
# Did the script generate a new tarball?  Compare only the hash of
# the previous tarball and the hash of the new tarball (the filename
# also contains the date/timestamp, which will always be different).
# If there is a new tarball, save it so that we can spawn the
# coverity checker on it afterwards.  Only do this for master (for
# now).
latest_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
latest_snapshot_hash=`echo $latest_snapshot | cut -d- -f3`
echo "=== Latest snapshot: $latest_snapshot (hash: $latest_snapshot_hash)"
if test "$prev_snapshot_hash" = "$latest_snapshot_hash"; then
echo "=== Hash has not changed; no need to upload/save the new tarball"
else
if test "$branch" = "master"; then
echo "=== Saving output for a Coverity run"
echo "$outputroot/$branch/openmpi-$latest_snapshot.tar.bz2" >> $pending_coverity
else
echo "=== NOT saving output for a Coverity run"
fi
echo "=== Posting tarball to open-mpi.org"
# tell the web server to cleanup old nightly tarballs
run_command ssh -p 2222 \
$output_ssh_target \
\"git/ompi/contrib/build-server/remove-old.pl 7 public_html/nightly/$branch\"
# upload the new ones
run_command scp -P 2222 \
$outputroot/$branch/openmpi-$latest_snapshot.tar.* \
$output_ssh_target:public_html/nightly/$branch/
run_command scp -P 2222 \
$outputroot/$branch/latest_snapshot.txt \
$output_ssh_target:public_html/nightly/$branch/
# direct the web server to regenerate the checksums
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/nightly/$branch \&\& md5sum openmpi\* \> md5sums.txt\"
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/nightly/$branch \&\& sha1sum openmpi\* \> sha1sums.txt\"
fi
# Failed builds are not removed automatically.  But if a human
# forgets to come in here and clean up the old failed builds, they
# can accumulate over time.  So remove any failed builds that are
# more than a week old (remove-old.pl is given a 7-day limit).
run_command ${script_dir}/remove-old.pl 7 $build_root/$branch
done
# If we had any new snapshots to send to coverity, process them now
for tarball in `cat $pending_coverity`; do
echo "=== Submitting $tarball to Coverity..."
run_command ${script_dir}/openmpi-nightly-coverity.pl \
--filename=$tarball \
--coverity-token=$coverity_token \
--verbose \
--logfile-dir=$HOME/coverity \
--make-args=-j4 \
--configure-args=\"$coverity_configure_args\"
done
rm -f $pending_coverity

View file

@@ -1,162 +0,0 @@
#!/usr/bin/env perl
use warnings;
use strict;
use Getopt::Long;
use File::Temp qw/ tempfile tempdir /;
use File::Basename;
my $coverity_project = "open-mpi%2Fpmix";
# Coverity changes this URL periodically
my $coverity_tool_url = "https://scan.coverity.com/download/cxx/linux64";
my $filename_arg;
my $coverity_token_arg;
my $dry_run_arg = 0;
my $verbose_arg = 0;
my $debug_arg = 0;
my $logfile_dir_arg = "/tmp";
my $configure_args = "";
my $make_args = "-j 32";
my $help_arg = 0;
&Getopt::Long::Configure("bundling");
my $ok = Getopt::Long::GetOptions("filename=s" => \$filename_arg,
"coverity-token=s" => \$coverity_token_arg,
"logfile-dir=s" => \$logfile_dir_arg,
"configure-args=s" => \$configure_args,
"make-args=s" => \$make_args,
"dry-run!" => \$dry_run_arg,
"verbose!" => \$verbose_arg,
"debug!" => \$debug_arg,
"help|h" => \$help_arg);
$ok = 0
if (!defined($filename_arg));
$ok = 0
if (!defined($coverity_token_arg));
if (!$ok || $help_arg) {
print "Usage: $0 --filename=FILENAME --coverity-token=TOKEN [--dry-run] [--verbose] [--help]\n";
exit($ok);
}
die "Cannot read $filename_arg"
if (! -r $filename_arg);
$verbose_arg = 1
if ($debug_arg);
######################################################################
sub verbose {
print @_
if ($verbose_arg);
}
# run a command and save the stdout / stderr
sub safe_system {
my $allowed_to_fail = shift;
my $cmd = shift;
my $stdout_file = shift;
# Redirect stdout to a file if requested; otherwise discard it unless debugging
if (defined($stdout_file)) {
$stdout_file = "$logfile_dir_arg/$stdout_file";
unlink($stdout_file);
$cmd .= " >$stdout_file";
} elsif (!$debug_arg) {
$cmd .= " >/dev/null";
}
$cmd .= " 2>&1";
my $rc = system($cmd);
if (0 != $rc && !$allowed_to_fail) {
# If we die/fail, make sure to change out of the temp tree so
# that it can be removed upon exit.
chdir("/");
die "Command $cmd failed: exit status $rc";
}
system("cat $stdout_file")
if ($debug_arg && defined($stdout_file) && -f $stdout_file);
}
######################################################################
# Make an area to work
my $dir = tempdir(CLEANUP => 1);
chdir($dir);
verbose "*** Working in $dir\n";
######################################################################
# Get the coverity tool, put it in our path.
my $cdir = "$ENV{HOME}/coverity";
safe_system(0, "mkdir $cdir")
if (! -d $cdir);
# Optimization: the tool is pretty large. If our local copy is less
# than a day old, just use that without re-downloading.
my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
$atime,$mtime,$ctime,$blksize,$blocks) =
stat("$cdir/coverity_tool.tgz");
my $now = time();
if (!defined($mtime) || $mtime < $now - 24*60*60) {
verbose "*** Downloading new copy of the coverity tool\n";
safe_system(0, "wget $coverity_tool_url --post-data \"token=$coverity_token_arg&project=$coverity_project\" -O coverity_tool.tgz");
safe_system(0, "cp coverity_tool.tgz $cdir");
}
verbose "*** Expanding coverity tool tarball\n";
safe_system(0, "tar xf $cdir/coverity_tool.tgz");
opendir(my $dh, ".") ||
die "Can't opendir .";
my @files = grep { /^cov/ && -d "./$_" } readdir($dh);
closedir($dh);
my $cov_dir = "$dir/$files[0]/bin";
$ENV{PATH} = "$cov_dir:$ENV{PATH}";
######################################################################
# Expand the PMIX tarball, build it
verbose "*** Extracting PMIX tarball\n";
safe_system(0, "tar xf $filename_arg");
my $tarball_filename = basename($filename_arg);
$tarball_filename =~ m/^pmix-(.+)\.tar.+$/;
my $pmix_ver = $1;
chdir("pmix-$pmix_ver");
verbose "*** Configuring PMIX tarball\n";
safe_system(0, "./configure $configure_args", "configure");
verbose "*** Building PMIX tarball\n";
safe_system(0, "cov-build --dir cov-int make $make_args", "cov-build");
# Tar up the Coverity results
verbose "*** Tarring up results\n";
safe_system(0, "tar jcf $pmix_ver-analyzed.tar.bz2 cov-int");
# If not dry-run, submit to Coverity
if ($dry_run_arg) {
verbose "*** Would have submitted, but this is a dry run\n";
} else {
verbose "*** Submitting results\n";
safe_system(0, "curl --form token=$coverity_token_arg " .
"--form email=rhc\@open-mpi.org " .
"--form file=\@$pmix_ver-analyzed.tar.bz2 " .
"--form version=$pmix_ver " .
"--form description=nightly-master " .
"https://scan.coverity.com/builds?project=$coverity_project",
"coverity-submit");
}
verbose("*** All done\n");
# Chdir out of the tempdir so that it can be removed
chdir("/");
exit(0);

View file

@@ -1,196 +0,0 @@
#!/bin/sh
#####
#
# Configuration options
#
#####
# e-mail address to send results to
#results_addr=testing@lists.open-mpi.org
results_addr=rhc@open-mpi.org
# Set this to any value for additional output; typically only when
# debugging
: ${debug:=}
# git repository URIs
master_code_uri=https://github.com/pmix/master.git
master_raw_uri=https://raw.github.com/pmix/master
release_code_uri=https://github.com/pmix/releases.git
release_raw_uri=https://raw.github.com/pmix/releases
# where to put built tarballs
outputroot=$HOME/pmix/nightly
# Target where to scp the final tarballs
output_ssh_target=ompiteam@192.185.39.252
# where to find the build script
script_uri=contrib/nightly/create_tarball.sh
# helper scripts dir
script_dir=$HOME/ompi/contrib/build-server
# The tarballs to make
if [ $# -eq 0 ] ; then
branches="master"
else
branches=$@
fi
# Build root - scratch space
build_root=$HOME/pmix/nightly-tarball-build-root
# Coverity stuff
coverity_token=`cat $HOME/coverity/pmix-token.txt`
coverity_configure_args="--with-libevent=$HOME_PREFIX"
export PATH=$HOME_PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$HOME_PREFIX/lib:$LD_LIBRARY_PATH
#####
#
# Actually do stuff
#
#####
debug() {
if test -n "$debug"; then
echo "=== DEBUG: $*"
fi
}
run_command() {
debug "Running command: $*"
debug "Running in pwd: `pwd`"
if test -n "$debug"; then
eval $*
else
eval $* > /dev/null 2>&1
fi
if test $? -ne 0; then
echo "=== Command failed: $*"
fi
}
# load the modules configuration
. $MODULE_INIT
module use $AUTOTOOL_MODULE
# get our nightly build script
mkdir -p $build_root
cd $build_root
pending_coverity=$build_root/tarballs-to-run-through-coverity.txt
rm -f $pending_coverity
touch $pending_coverity
# Loop making the tarballs
module unload autotools
for branch in $branches; do
echo "=== Branch: $branch"
# Get the last tarball version that was made
prev_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
prev_snapshot_hash=`echo $prev_snapshot | cut -d- -f3`
echo "=== Previous snapshot: $prev_snapshot (hash: $prev_snapshot_hash)"
if test "$branch" = "master"; then
code_uri=$master_code_uri
raw_uri=$master_raw_uri
else
code_uri=$release_code_uri
raw_uri=$release_raw_uri
fi
# Form a URL-specific script name
script=$branch-`basename $script_uri`
echo "=== Getting script from: $raw_uri"
run_command wget --quiet --no-check-certificate --tries=10 $raw_uri/$branch/$script_uri -O $script
if test ! $? -eq 0 ; then
echo "wget of PMIX nightly tarball create script failed."
if test -f $script ; then
echo "Using older version of $script for this run."
else
echo "No build script available. Aborting."
exit 1
fi
fi
chmod +x $script
module load "autotools/pmix-$branch"
# module load "libevent/pmix-$branch"
echo "=== Running script..."
run_command ./$script \
$build_root/$branch \
$results_addr \
$outputroot/$branch \
$code_uri \
$branch
module unload autotools
echo "=== Done running script"
# Did the script generate a new tarball?  Compare only the hash of
# the previous tarball and the hash of the new tarball (the filename
# also contains the date/timestamp, which will always be different).
# If there is a new tarball, save it so that we can spawn the
# coverity checker on it afterwards.  Only do this for master (for
# now).
latest_snapshot=`cat $outputroot/$branch/latest_snapshot.txt`
latest_snapshot_hash=`echo $latest_snapshot | cut -d- -f3`
echo "=== Latest snapshot: $latest_snapshot (hash: $latest_snapshot_hash)"
if test "$prev_snapshot_hash" = "$latest_snapshot_hash"; then
echo "=== Hash has not changed; no need to upload/save the new tarball"
else
if test "$branch" = "master"; then
echo "=== Saving output for a Coverity run"
echo "$outputroot/$branch/pmix-$latest_snapshot.tar.bz2" >> $pending_coverity
else
echo "=== NOT saving output for a Coverity run"
fi
echo "=== Posting tarball to open-mpi.org"
# tell the web server to cleanup old nightly tarballs
run_command ssh -p 2222 \
$output_ssh_target \
\"git/ompi/contrib/build-server/remove-old.pl 7 public_html/software/pmix/nightly/$branch\"
# upload the new ones
run_command scp -P 2222 \
$outputroot/$branch/pmix-$latest_snapshot.tar.* \
$output_ssh_target:public_html/software/pmix/nightly/$branch/
run_command scp -P 2222 \
$outputroot/$branch/latest_snapshot.txt \
$output_ssh_target:public_html/software/pmix/nightly/$branch/
# direct the web server to regenerate the checksums
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/software/pmix/nightly/$branch \&\& md5sum pmix\* \> md5sums.txt\"
run_command ssh -p 2222 \
$output_ssh_target \
\"cd public_html/software/pmix/nightly/$branch \&\& sha1sum pmix\* \> sha1sums.txt\"
fi
# Failed builds are not removed automatically.  But if a human
# forgets to come in here and clean up the old failed builds, they
# can accumulate over time.  So remove any failed builds that are
# more than a week old (remove-old.pl is given a 7-day limit).
run_command ${script_dir}/remove-old.pl 7 $build_root/$branch
done
# If we had any new snapshots to send to coverity, process them now
for tarball in `cat $pending_coverity`; do
echo "=== Submitting $tarball to Coverity..."
run_command ${script_dir}/pmix-nightly-coverity.pl \
--filename=$tarball \
--coverity-token=$coverity_token \
--verbose \
--logfile-dir=$HOME/coverity \
--make-args=-j8 \
--configure-args=\"$coverity_configure_args\"
done
rm -f $pending_coverity

View file

@@ -1,59 +0,0 @@
#!/usr/bin/env perl
use strict;
use warnings;
use POSIX qw(strftime);
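# Usage (as implied by the argument handling below):
#   remove-old.pl <days-to-keep> <directory>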
my $happy = 1;
my $savedays = $ARGV[0];
my $dir = $ARGV[1];
$happy = 0
if ($savedays <= 0 || ! -d $dir);
die "Must specify number of days and a directory"
if (!$happy);
#------------------------------------------------------------------
# Read in all the dir entries
opendir(DIR, $dir) || die "Cannot open $dir";
my @files = readdir(DIR);
closedir(DIR);
# How many days to keep?
my $t = time() - ($savedays * 60 * 60 * 24);
print "Deleting anything in $dir before: " . strftime("%D", localtime($t)) . "\n";
my $to_delete = "";
# Check everything in the dir; skip the bookkeeping files and . / ..,
# and mark anything older than the save date for deletion later.
foreach my $file (sort(@files)) {
if ($file ne "index.php" &&
$file ne "md5sums.txt" &&
$file ne "sha1sums.txt" &&
$file ne "latest_snapshot.txt" &&
$file ne "." &&
$file ne "..") {
my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
$atime,$mtime,$ctime,$blksize,$blocks) = stat("$dir/$file");
my $str = "SAVE";
if ($mtime < $t) {
$to_delete = "$to_delete $dir/$file";
$str = "DELETE";
}
print "Found $file: $str (mtime: " . strftime("%D", localtime($mtime)) . ")\n";
}
}
# If we found anything to delete, do so.
if ($to_delete ne "") {
print "Deleting: $to_delete\n";
system("chmod -R u=rwx $to_delete");
system("rm -rf $to_delete");
} else {
print "Nothing to delete!\n";
}
exit(0);

View file

@@ -1,311 +0,0 @@
#!/bin/sh
#
# Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
# University Research and Technology
# Corporation. All rights reserved.
# Copyright (c) 2004-2005 The University of Tennessee and The University
# of Tennessee Research Foundation. All rights
# reserved.
# Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
# University of Stuttgart. All rights reserved.
# Copyright (c) 2004-2005 The Regents of the University of California.
# All rights reserved.
# Copyright (c) 2006-2017 Cisco Systems, Inc. All rights reserved.
# $COPYRIGHT$
#
# Additional copyrights may follow
#
# $HEADER$
#
# This script is used to create a nightly snapshot tarball of Open MPI.
#
# $1: scratch root
# $2: e-mail address for destination
# $3: dest dir
# $4: git URL
# $5: git branch
#
scratch_root=$1
email=$2
destdir=$3
giturl=$4
gitbranch=$5
# Set this to any value for additional output; typically only when
# debugging
: ${debug:=}
# do you want a success mail?
want_success_mail=1
# max length of logfile to send in an e-mail
max_log_len=50
# how many snapshots to keep in the destdir?
max_snapshots=5
############################################################################
# Shouldn't need to change below this line
############################################################################
start_time="`date`"
# Sanity checks
if test -z "$scratch_root" -o -z "$email" -o -z "$giturl" -o -z "$gitbranch" \
-o -z "$destdir"; then
echo "$0 scratch_root email_addr dest_dir git_url git_branch"
exit 1
fi
# Use the branch name as the "version" string (in case there is an
# error).  This version string will be replaced with the real version
# upon a successful "make distcheck".
version=$gitbranch
# send a mail
# should only be called after logdir is set
send_error_mail() {
outfile="$scratch_root/output.txt"
rm -f "$outfile"
touch "$outfile"
for file in `/bin/ls $logdir/* | sort`; do
len="`wc -l $file | awk '{ print $1}'`"
if test "`expr $len \> $max_log_len`" = "1"; then
echo "[... previous lines snipped ...]" >> "$outfile"
tail -n $max_log_len "$file" >> "$outfile"
else
cat "$file" >> "$outfile"
fi
done
Mail -s "=== CREATE FAILURE ($version) ===" "$email" < "$outfile"
rm -f "$outfile"
}
# record an error message, send the failure mail, and exit
die() {
msg="$*"
cat > "$logdir/00_announce.txt" <<EOF
Creating the nightly tarball ended in error:
$msg
EOF
send_error_mail
exit 1
}
# do the work
# should only be called after logdir is set
do_command() {
cmd="$*"
logfile="$logdir/20-command.txt"
rm -f "$logfile"
if test -n "$debug"; then
echo "*** Running command: $cmd"
eval $cmd > "$logfile" 2>&1
st=$?
echo "*** Command complete: exit status: $st"
else
eval $cmd > "$logfile" 2>&1
st=$?
fi
if test "$st" != "0"; then
cat > "$logdir/15-error.txt" <<EOF
ERROR: Command returned a non-zero exit status ($version):
$cmd
Start time: $start_time
End time: `date`
=======================================================================
EOF
cat > "$logdir/25-error.txt" <<EOF
=======================================================================
Your friendly daemon,
Cyrador
EOF
send_error_mail
exit 1
fi
rm -f "$logfile"
}
# see if the destination directory exists
if test ! -d "$destdir"; then
mkdir -p "$destdir"
fi
if test ! -d "$destdir"; then
die "Could not cd to dest dir: $destdir"
fi
# make sure we can write to the destdir
file="$destdir/test-write.$$"
touch "$file"
if test ! -f "$file"; then
die "Could not write to the dest dir: $destdir"
fi
rm -f "$file"
# move into the scratch directory and ensure we have an absolute
# pathname for it
if test ! -d "$scratch_root"; then
mkdir -p "$scratch_root"
fi
if test ! -d "$scratch_root"; then
die "Could not cd to scratch root: $scratch_root"
fi
cd "$scratch_root"
scratch_root="`pwd`"
# setup target directory where clone+logs will go
clone_root="$scratch_root/ompi-`date +%Y-%m-%d-%H%M%S`"
rm -rf $clone_root
mkdir -p $clone_root
# startup the logfile (must be before do_command)
logdir="$clone_root/logs"
mkdir "$logdir"
# Get a fresh git clone
cd $clone_root
do_command "git clone $giturl ompi"
cd ompi
do_command "git checkout $gitbranch"
# Nightly tarballs are named in this format:
# openmpi-${BRANCHNAME}-${YYYYMMDDHHMM}-${SHORTHASH}.tar.${COMPRESSION}
timestamp=`date '+%Y%m%d%H%M'`
githash=`git log -n 1 '--pretty=format:%h'`
version="$gitbranch-$timestamp-$githash"
if test -n "$debug"; then
echo "*** This snapshot version: $version"
fi
# if there's a $destdir/latest_snapshot.txt, see if anything has
# happened since the version listed in that file
if test -f "$destdir/latest_snapshot.txt"; then
snapshot_version=`cat $destdir/latest_snapshot.txt`
if test -n "$debug"; then
echo "*** Last snapshot version: $snapshot_version"
fi
# Do we need a new snapshot?
# Snip the timestamp out of the versions and compare just
# ${BRANCHNAME}-${SHORTHASH}.
compare_version="$gitbranch-$githash"
compare_snapshot_version=`echo $snapshot_version | perl -p -e 's/^(.+?)-(\d+)-(.*+)$/$1-$3/'`
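# (e.g. a previous snapshot string like "master-201708180200-abc1234"
# would be reduced to "master-abc1234"; the values here are illustrative)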
if test "$compare_version" = "$compare_snapshot_version"; then
if test -n "$debug"; then
echo "*** Our branch/git hash is the same as the last snapshot -- not doing anything"
fi
# Since we didn't do anything, there's no point in leaving the clone we
# just created
cd ..
rm -rf $clone_root
# All done... nothing to see here...
exit 0
fi
fi
if test -n "$debug"; then
echo "*** Houston: we're a go to make snapshot $version"
fi
# Ensure that VERSION is set to indicate that it wants a snapshot, and
# insert the actual value that we want (so that ompi_get_version.sh
# will report exactly that version).
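# (After the sed below, VERSION should contain lines of the form
#  repo_rev=<short hash> and tarball_version=<branch>-<timestamp>-<hash>.)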
sed -e 's/^repo_rev=.*/repo_rev='$githash/ \
-e 's/^tarball_version=.*/tarball_version='$version/ \
VERSION > VERSION.new
cp -f VERSION.new VERSION
rm -f VERSION.new
# lie about our username in $USER so that autogen will skip all
# .ompi_ignore'ed directories (i.e., so that we won't get
# .ompi_unignore'ed)
USER="ompibuilder"
export USER
# autogen is our friend
do_command "./autogen.pl --force"
# do config
do_command "./configure"
# Do make distcheck (which will invoke config/distscript.csh to set
# the right values in VERSION). distcheck does many things; we need
# to ensure it doesn't pick up any other installs via LD_LIBRARY_PATH.
# It may be a bit Draconian to totally clean LD_LIBRARY_PATH (i.e., we
# may need something in there), but at least in the current building
# setup, we don't. But be advised that this may need to change in the
# future...
save=$LD_LIBRARY_PATH
LD_LIBRARY_PATH=
do_command "make -j 8 distcheck"
LD_LIBRARY_PATH=$save
save=
# chmod the whole directory, so that core files are accessible by others
chmod a+rX -R .
# move the resulting tarballs to the destdir
gz="`/bin/ls openmpi*tar.gz`"
bz2="`/bin/ls openmpi*tar.bz2`"
mv $gz $bz2 $destdir
if test "$?" != "0"; then
cat <<EOF
ERROR -- move final tarballs to web tree failed!
From: `pwd`
Files: $gz $bz2
To: $destdir
The nightly snapshots are therefore not available on the web!
EOF
die "Could not move final tarballs to web dir: $destdir"
fi
cd $destdir
# make the latest_snapshot.txt file containing the last version
version="`echo $gz | sed -e 's/openmpi-\(.*\)\.tar\.gz/\1/g'`"
rm -f latest_snapshot.txt
echo $version > latest_snapshot.txt
# trim the destdir to $max_snapshots
for ext in gz bz2; do
count="`ls openmpi*.tar.$ext | wc -l | awk '{ print $1 }'`"
if test "`expr $count \> $max_snapshots`" = "1"; then
num_old="`expr $count - $max_snapshots`"
old="`ls -rt openmpi*.tar.$ext | head -n $num_old`"
rm -f $old
fi
done
# generate md5 and sha1 sums
rm -f md5sums.txt sha1sums.txt
touch md5sums.txt sha1sums.txt
for file in `/bin/ls *gz *bz2 | grep -v latest`; do
md5sum $file >> md5sums.txt
sha1sum $file >> sha1sums.txt
done
# remove temp dirs
cd "$scratch_root"
rm -rf "$clone_root"
# send success mail
if test "$want_success_mail" = "1"; then
Mail -s "Create success ($version)" "$email" <<EOF
Creating nightly snapshot tarball was a success.
Snapshot: $version
Start time: $start_time
End time: `date`
Your friendly daemon,
Cyrador
EOF
fi