--- /dev/null
+#!/usr/local/bin/perl -Tw
+#
+# Take files that have been uploaded via ftp and move them into place on
+# ftp.gnu.org.
+#
+# This file is free software; as a special exception the author gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# We expect to find (1) a directive file, specifying the directory to
+# store into (syntax details later); (2) the actual file to be
+# distributed (no signature); (3) a detached signature for (2).
+#
+# For instance:
+# foo-1.2.tar.gz.directive.asc, signed (gpg --clearsign) ASCII text
+# about what to do with the other two files.
+# foo-1.2.tar.gz, the file to upload (could be anything) with no sig.
+# foo-1.2.tar.gz.sig, detached binary signature (gpg -b) for uploaded file.
+#
+# The directive file must always contain a 'directory', 'version' and
+# 'filename' directive, unless when run in compatibility mode ($V1_COMPAT_MODE
+# is set), in which case directive files with only a 'directory' directive are
+# allowed.
+#
+# Alternatively, we expect to find only a directive file. This file must have
+# a name that ends in 'directive.asc', and must contain one or more of these
+# directives: 'symlink', 'rmsymlink' and 'archive', in addition to the obligatory
+# 'directory' and 'version' directives. A 'filename' directive is not allowed.
+#
+# A 'comment' directive is always allowed.
+#
+# Permissions of directives are controlled on a per package basis. For each
+# package, one or more separate GPG public keyrings are maintained. All signed
+# files that are part of a triplet must be signed by a permitted key for the
+# specific package named in the directive file. Each package is only allowed
+# write access to its own directory (with the same name as the package) within
+# the public ftp hierarchy. Write access to subdirectories of that directory is
+# allowed. Limiting write access to specific directories on a per package basis
+# minimizes the impact from the compromise of a maintainer's GPG private key. The
+# public keyrings form a hierarchy where keys in superdirectories can apply to
+# files uploaded to subdirectories. Example:
+#
+# Jim can only upload to aspell/dict/en so his key is located in
+# packages/aspell/dict/en/pubring.gpg. Paul can upload to any aspell
+# directory so his key is in packages/aspell/pubring.gpg. When Paul
+# uploads a file to aspell/dict/en it looks for his key in
+# packages/aspell/dict/en/pubring.gpg first, then finds it in
+# packages/aspell/pubring.gpg.
+#
+# Package config files are:
+# 1. $package_config_base/$package_name/pubring.gpg
+# GNUPG public keyring. For a given package, the directive
+# file and file to be uploaded, must be signed with a key from
+# the package's keyring.
+# If a file is to be uploaded into a subdirectory of the package
+# directory, the subdirectory in the package config directory will
+# be checked for a pubring.gpg first, going up to the parent directory
+# until a match is found.
+# 2. $package_config_base/$package_name/email
+# Email addresses that are sent mail when uploads succeed or
+# fail for a given package. One email address per line.
+#
+# This is written for use with ftp instead of as a cgi script because we
+# don't want to run an httpd on ftp.gnu.org. In general, it tries to do
+# the minimum possible.
+#
+# We execute gpgv, lsof, mkdir, mv, and pwd. Executions are such that
+# it's not possible for the shell to be invoked. We make use of Perl
+# module Net::SMTP to send email.
+#
+# Originally written by Karl Berry (karl@gnu.org), October 2003.
+# Additional changes by Paul Fisher (rao@gnu.org), November 2003
+# Additional functionality (v1.1) by Ward Vandewege (ward@gnu.org), May 2004
+# Additional changes (syslog) by Justin Baugh (baughj@gnu.org), August 2005
+# Additional testing and bugfixes by Ward Vandewege (ward@gnu.org), Apr 2006
+
+use strict;
+use Net::SMTP;
+use Date::Manip;
+use Sys::Syslog qw(:DEFAULT setlogsock);
+use Getopt::Long;
+# World-readable files/dirs by default; installed files must be public.
+umask (022);
+
+$ENV{"LC_ALL"} = "C"; # do not think about multibyte characters
+
+# Clean env so we can run subprograms, notably gpgv and lsof.
+$ENV{"PATH"} = "/usr/bin:/bin:/usr/sbin";
+delete @ENV{qw(IFS CDPATH ENV BASH_ENV)};
+
+# When true, accept legacy v1.0 uploads whose directive file contains
+# only a 'directory' directive (see the header comment above).
+my $V1_COMPAT_ALLOWED = 1;
+
+my $NAME = 'upload-ftp-v1.1.pl';
+my $VERSION = '1.1'; # This is the protocol version
+my $DATE = '2006/12/04 11:00:44';
+my $AUTHOR = "the Free Software Foundation <sysadmin\@gnu.org>";
+my $COPYRIGHT = "2003-2006";
+my $LICENSE = "GPL - http://www.fsf.org/licenses/gpl.txt";
+my $URL = "http://www.gnu.org/prep/maintain/html_node/Automated-FTP-Uploads.html";
+
+# Disabled CVS-keyword-based versioning, kept for reference.
+#my $NAME = $CVSNAME;
+#$NAME =~ s/^.RCSfile: (.*?),v .$/$1/g;
+#my $VERSION = $CVSVERSION;
+#$VERSION =~ s/^.Revision: (.*?) .$/$1/g;
+#my $DATE = $CVSDATE;
+#$DATE =~ s/^.Date: (.*?) .*?.$/$1/g;
+
+my $style = '';
+my $help = '';
+my $version = '';
+# Set this to 1 or higher to get debug output in the log file.
+# NOTE(review): when set, the directive file contents are also passed to
+# debug() (see read_directive_file), which apparently e-mails them to the
+# maintainer - confirm against the debug() definition.
+my $DEBUG = 1;
+
+GetOptions ("style=s" => \$style, "debug=i" => \$DEBUG, "help" => \$help, "version" => \$version);
+
+&version_information () if ($version);
+&usage_information() if ($help);
+# A recognized -s style is mandatory; anything else prints usage and exits.
+&usage_information() if (($style ne 'ftp') && ($style ne 'alpha') && ($style ne 'distros'));
+
+my $m_style = 'ftp';
+$m_style = 'alpha' if ($style eq 'alpha');
+$m_style = 'gnu+linux-distros' if ($style eq 'distros');
+
+# Settings to configure:
+my $package_config_base = "/home/gatekpr/packages";
+# where ftpd deposits the files for us to look at:
+my $incoming_dir = "/home/upload/incoming/$m_style";
+# private dir on SAME FILESYSTEM as $incoming_dir:
+my $incoming_tmp = "/var/tmp/$m_style-in";
+# top-level public ftp dir for installing files:
+my $destfinal = "/home/$m_style/gnu";
+$destfinal = "/home/ftp/$m_style" if ($m_style eq 'gnu+linux-distros'); # The distros go here
+# private dir on SAME FILESYSTEM as $destfinal:
+my $olddestfinal = "/home/gatekpr/$m_style-archived";
+# private dir on SAME FILESYSTEM as $destfinal:
+my $desttmp = "/var/tmp/$m_style-out";
+
+# Site-admin keyring, accepted for every package (see keyring_file).
+my $master_keyring = "/home/gatekpr/etc/master_pubring.gpg";
+
+my $log_style = 'GNU';
+$log_style = 'Alpha' if ($style eq 'alpha');
+$log_style = 'Distros' if ($style eq 'distros');
+
+# maintainer e-mail address
+my $maintainer_email = "ward\@gnu.org";
+
+# Per-file record of the newest accepted signature timestamp, used by
+# read_directive_file to reject replayed/stale directive files.
+my $serials_path = "/home/gatekpr/etc/upload-ftp-serials.txt";
+
+my $email_always = 'ftp-upload-script@gnu.org'; # e.g., ftp-upload@gnu.org
+
+# syslog destination
+my $facility = "LOCAL5";
+
+my %info; # package being processed; a global so fatal and mail can use it
+
+exit (&main ());
+
+# Main driver: verify the configured directories, scan the incoming
+# area for complete uploads (scan_incoming moves accepted files into
+# $incoming_tmp), then process each triplet or lone directive file
+# inside an eval so one failure cannot block the rest of the queue.
+# Returns 0, used as the process exit status.
+sub main
+{
+
+ # Initialize our syslogging
+ setlogsock('unix');
+ openlog("ftp-upload", 'pid', $facility);
+ ftp_syslog('info', "($log_style) Beginning upload processing run.");
+
+ # make sure our directories all exist, or it's hopeless.
+ # Use die instead of fatal - this error should "never" happen.
+ # NOTE(review): the code actually calls ftp_die (the syslog-aware
+ # wrapper), not plain die - the comment above predates it.
+ for my $dir ($package_config_base, $incoming_dir, $incoming_tmp,
+ $destfinal, $desttmp) {
+ -d $dir || ftp_die("FATAL: configuration problem, $dir is not a directory");
+ }
+
+ # the chdir simplifies our filename parsing, so the base names don't
+ # have any directory.
+ chdir ($incoming_dir) || ftp_die("FATAL: chdir($incoming_dir) failed: $!");
+ my @incoming = &scan_incoming ();
+
+ # we've moved the files to work on to a new directory.
+ chdir ($incoming_tmp) || ftp_die("FATAL: chdir($incoming_tmp) failed: $!");
+
+ for my $files (@incoming) { # each list element is a hash reference.
+ ftp_syslog('info',"($log_style) found directive: $files->{directive}\n");
+ # if we die processing a triplet, the eval allows us to move
+ # onto the next triplet.
+ eval {
+ # set up the %info variable
+ &read_directive_file ($files->{"directive"},$files->{"upload"},$files->{"directive_only"});
+
+ # do the work
+ &execute_commands($files,%info);
+
+ # report success
+ if (!$files->{"directive_only"}) {
+ &success_upload($files->{"sig"}, $files->{"upload"},$files->{"directive"});
+ } else {
+ &success_directive($files->{directive});
+ }
+ };
+
+ # clean up files if we abort while processing a triplet
+ cleanup ($files->{"sig"}, $files->{"upload"}, $files->{"directive"}) if ($@);
+ # clear out the current package that we just finished processing
+ undef %info;
+ }
+ if ((scalar @incoming) == 0) {
+ ftp_syslog('info', "($log_style) No files found for processing.");
+ } else {
+ ftp_syslog('info', "($log_style) Processing complete: " . (scalar @incoming) . " uploads processed.");
+ }
+
+ # Clean up the incoming directory and the incoming tmp directory - remove files older than a day
+ cleanup_dir($incoming_dir);
+ cleanup_dir($incoming_tmp);
+
+ return 0;
+}
+
+# Build and print the usage/help screen, then terminate the process.
+# Reached on -h, or whenever a valid -s style was not supplied.
+sub usage_information {
+    my $msg = "\n$NAME protocol v$VERSION ($DATE)\n";
+    $msg .= "More information at $URL\n";
+    $msg .= "\nERROR: You have not supplied all required parameters. $NAME takes these arguments:\n\n";
+    $msg .= " $NAME -s <style> [-d <debuglevel>] [-v] [-h]\n\n";
+    $msg .= " <style> is the execution 'style'. Call $NAME\n";
+    $msg .= " without the -s parameter to get a list of possible styles.\n";
+    $msg .= " -d <debuglevel> (optional) set debug level. 0 means no debugging\n";
+    $msg .= " -v (optional) display version information\n";
+    $msg .= " -h (optional) display this help screen\n\n";
+    $msg .= "Possible styles:\n\n";
+    $msg .= " ftp\n";
+    $msg .= " alpha\n";
+    $msg .= " distros\n";
+    $msg .= "\n";
+    print $msg;
+    exit;
+}
+
+# Print version and copyright information, then terminate the process.
+sub version_information {
+    my $banner = <<"END_VERSION";
+
+This is $NAME protocol version $VERSION ($DATE)
+Copyright (c) $COPYRIGHT by $AUTHOR
+License: $LICENSE
+More information at $URL
+
+END_VERSION
+    print $banner;
+    exit;
+}
+
+# Move $dir/$file into the archive area at $olddestfinal/$subdir,
+# prefixing the archived name with a timestamp and a large random
+# number so repeated archivals of the same filename never collide.
+# Aborts the current triplet (via fatal) if the source is missing in
+# the public tree or the target name already exists in the archive.
+# NOTE(review): the existence check probes $destfinal/$subdir/$file
+# while the mv operates on $dir/$file; callers pass matching values
+# ($dir eq "$destfinal/$subdir"), but confirm before reusing elsewhere.
+sub archive {
+ my ($dir, $subdir, $file) = @_;
+
+ # Abort if file to archive doesn't exist
+ &fatal("$subdir/$file does not exist - can not archive",1) if (!-e "$destfinal/$subdir/$file");
+ use POSIX qw(strftime);
+ my $timestamp = strftime "%Y-%m-%d_%H-%M-%S", localtime;
+ $timestamp .= sprintf("_%09d",rand(1000000000)); # Add a large random number for good measure
+ # Abort if a file with same name exists in the archive
+ &fatal("$subdir/$file exists in archive - can not overwrite",1) if (-e "$olddestfinal/$subdir/$timestamp" . "_$file");
+
+ # List-form system() - arguments never pass through a shell.
+ my @mkdir_args = ("/bin/mkdir","-p","$olddestfinal/$subdir");
+ &fatal("@mkdir_args failed",0) if system (@mkdir_args) != 0;
+ my @mv_args = ("/bin/mv", "$dir/$file", "$olddestfinal/$subdir/$timestamp" . "_$file");
+ &fatal("@mv_args failed",0) if system (@mv_args) != 0;
+ ftp_syslog('info', "($log_style) archived $dir/$file to $olddestfinal/$subdir/$timestamp" . "_$file");
+
+}
+
+
+# Actual executing of commands. Respects the chronological order
+# they were specified in, thanks to the 'order' value in the %info
+# hash
+# Execute the directives collected in %info in the order they appeared
+# in the directive file (each command entry is a hash with an 'order'
+# key). Plain metadata keys are stripped from a working copy first so
+# only command entries remain; the untouched original is what gets
+# passed on to check_files/install_files.
+sub execute_commands {
+ my $files = shift;
+ my %info = @_;
+
+ # This is ugly but necessary.
+ # Delete all info entries that are NOT hashes with an 'order' value
+ # (and hence would mess up the foreach loop below). Make a backup of
+ # the hash first so we can feed the real thing to check_files & install_files
+ my %originfo = %info;
+ delete($info{directory});
+ delete($info{email});
+ delete($info{package});
+ delete($info{version});
+ delete($info{'v1_compat_mode'});
+
+ my $destdir = "$destfinal/$originfo{directory}";
+ foreach my $key (sort { $info{$a}{order} <=> $info{$b}{order} } keys %info) {
+ if ($key eq 'filename') {
+ &check_files($files,%originfo);
+ &install_files($files,%originfo);
+ } elsif ($key =~ /^symlink-(.*)/) {
+ # Get current working dir
+ my $cwd = `pwd`;
+ chomp($cwd);
+ # change to destination dir
+ # NOTE(review): chdir's return value is unchecked here; if it ever
+ # failed, the symlink would be created relative to the old cwd.
+ chdir($destdir);
+ # symlink away!
+ symlink("$1",$info{$key}{link}) || &fatal("creation of symlink $info{$key}{link} to $1 in $destdir failed: $!",1);
+ # go back to current working dir
+ chdir($cwd);
+ ftp_syslog('info', "($log_style) added symlink $destdir/" . $info{$key}{link} . " pointing to $destdir/$1");
+ } elsif ($key =~ /^rmsymlink-(.*)/) {
+ unlink("$destdir/$1") || &fatal("removal of symlink $1 failed: $!",1);
+ ftp_syslog('info', "($log_style) removed symlink $destdir/$1");
+ } elsif ($key =~ /^archive-(.*)/) {
+ # Archive both the named file and its detached signature.
+ archive($destdir, $originfo{directory}, $1);
+ archive($destdir, $originfo{directory}, "$1.sig")
+ }
+ }
+
+ # We're running in v1 mode.
+ # v1 directive files have no 'filename' entry, so the loop above never
+ # triggers the install; do it unconditionally for the legacy triplet.
+ if ($originfo{'v1_compat_mode'}) {
+ &check_files($files,%originfo);
+ &install_files($files,%originfo);
+ }
+}
+
+\f
+# Read the ftp incoming dir (which is assumed to be the current
+# directory), looking for completed upload triples (the three files
+# described at the beginning). Ignore if we don't have all three files,
+# or if any of the files are still open, or if the filenames are dubious
+# -- things'll get cleaned up as needed separately.
+#
+# If we accept a triplet, we rename the files into a temporary
+# directory. This is to avoid attackers overwriting files as or after
+# we check them. This is redundant protection -- the ftp config on
+# ftp.gnu.org does not allow overwrites or deletes.
+#
+sub scan_incoming {
+ my @ret;
+
+ my %possible;
+ # Get list of all possible files from incoming dir.
+ #
+ opendir (INCOMING, $incoming_dir)
+ || ftp_die("FATAL opendir($incoming_dir) failed: $!");
+ while (my $tainted_ent = readdir (INCOMING)) {
+ # don't look at files with a leading dot or dash, but allow those chars
+ # subsequently. Omit files containing any other weird characters.
+ # This anchored whitelist match also untaints the name under -T.
+ next unless $tainted_ent =~ /^([\w_\+][-.\w_\+]*)$/;
+ my $ent = $1;
+
+ # Don't look at files with really long names, either.
+ next if length ($ent) > 100;
+ $possible{$ent} = 1;
+ }
+ closedir (INCOMING) || ftp_die("FATAL: closedir($incoming_dir) failed: $!");
+
+ # No possible files found, so return before we call lsof
+ return @ret unless %possible;
+
+ # Determine if any of those possible files are open. We find the
+ # possible files before running lsof (partly) to avoid a race
+ # condition. (If we ran lsof on the whole directory first, a new file
+ # might be uploaded and possibly be processed even though it was open.)
+ #
+ my %open;
+ #
+ # BTW, lsof -F0n mistakenly backslash-escapes newlines; fortunately,
+ # we've already excluded filenames containing whitespace so this
+ # cannot cause trouble. We use -F0n anyway, though, for redundant
+ # protection against strange filenames.
+ # NOTE(review): the invocation below actually passes -Fn, not -F0n;
+ # confirm which flag was intended before changing either.
+ #
+ # We do have to prepend $incoming_dir to make the possible names
+ # absolute, since lsof outputs absolute names.
+ #
+
+ my @lsof_args = ("/home/gatekpr/bin/lsof", "-Fn",
+ map { "$incoming_dir/$_" } keys %possible);
+ # Fork-and-exec via open("-|") so no shell ever sees the arguments.
+ my $pid = open (LSOF, "-|");
+
+ if ($pid) { # parent
+ while (defined (my $line = <LSOF>)) {
+ next unless $line =~ /^n${incoming_dir}\/(.+)$/; # only look at the name lines.
+ # Any file lsof reports as open is dropped from consideration.
+ delete ($possible{$1}) || ftp_warn("WARNING: lsof found unrequested but open $1?!");
+ }
+ close (LSOF);
+ } else { # child
+ exec (@lsof_args) || ftp_die("FATAL: cannot exec lsof: $!");
+ }
+
+ # For each remaining possibility, do some more checks
+ for my $ent (keys %possible) {
+ my $base = $ent;
+ my $sig = "$base.sig";
+ my $directive = "$base.directive.asc";
+ my $bare_base = $base;
+ $bare_base =~ s/\.directive\.asc$//g;
+
+ # work on this triple, if all three files exist, and the signature
+ # and directive files aren't huge. We want to exclude huge files
+ # here, before even reading the directive file; otherwise, perl could
+ # consume lots of memory reading it.
+ if (exists($possible{$base}) && exists($possible{$sig}) && exists($possible{$directive})
+ && (-s $directive < 50*1024) && (-s $sig < 50*1024)) {
+ push (@ret, { "directive" => $directive, "sig" => $sig,
+ "upload" => $base, "directive_only" => 0 });
+ ftp_syslog('info', "($log_style) processing [$directive:$sig:$base]");
+
+ # Do atomic rename to temp incoming directory before reading
+ # anything, for safety.
+ #
+ for my $f (($directive, $sig, $base)) {
+ rename ($f, "$incoming_tmp/$f")
+ || &fatal("rename $incoming_dir/$f to $incoming_tmp/$f failed: $!",0);
+ }
+
+ # don't bother to try any part of this triple again.
+ delete $possible{$base};
+ delete $possible{$sig};
+ delete $possible{$directive};
+ } elsif (exists($possible{$base}) && !exists($possible{"$bare_base.sig"}) && ($base =~ /\.directive\.asc$/)) {
+ # Directive file only, no actual file to deal with
+ # This can happen when dealing with symlink/rmsymlink/archive options
+ push (@ret, { "directive" => $base, "sig" => '',
+ "upload" => '', "directive_only" => 1 });
+ # Do atomic rename to temp incoming directory before reading
+ # anything, for safety.
+ #
+ rename ($base, "$incoming_tmp/$base")
+ || &fatal("rename $incoming_dir/$base to $incoming_tmp/$base failed: $!",0);
+
+ delete $possible{$base};
+ }
+ }
+
+ return @ret;
+}
+
+
+\f
+# Return array of public key files for PACKAGE_NAME.
+#
+# Return the list of GPG public keyring files that may vouch for an
+# upload into DIRECTORY (the package-relative destination path).
+# The master keyring always comes first; then we walk from the deepest
+# configured subdirectory up to the top-level package directory, adding
+# every pubring.gpg found along the way (most specific first).
+# PACKAGE_NAME is currently unused but kept for interface stability.
+sub keyring_file {
+ my ($package_name,$directory) = (shift,shift);
+ my @pubrings = ();
+
+ # First of all, add our 'master' keyring, for people with root to the ftp upload mechanism
+ push(@pubrings,$master_keyring);
+
+ # We go through each subdirectory, starting at the lowest subdirectory,
+ # and add each to an array of public key files
+ my $tmp = $directory;
+ while (1) {
+ if (-e "$package_config_base/$tmp/pubring.gpg") {
+ ftp_syslog('debug', "($log_style) DEBUG: " . "$package_config_base/$tmp/pubring.gpg") if $DEBUG;
+ push(@pubrings,"$package_config_base/$tmp/pubring.gpg");
+ }
+ # Strip the last path component; when nothing changes we have
+ # reached the top level, so stop.
+ my $tmp2 = $tmp;
+ $tmp2 =~ s/\/[^\/]*$//;
+ last if ($tmp eq $tmp2);
+ $tmp = $tmp2;
+ }
+
+ return @pubrings;
+}
+
+# Return the notification addresses for PACKAGE_NAME, read from
+# $package_config_base/$package_name/email (one address per line).
+# Lines that do not look like a plausible address are silently skipped.
+# Aborts the current triplet (via fatal) if the file cannot be opened.
+sub email_addresses {
+ my ($package_name) = @_;
+ my @ret;
+
+ # Lexical filehandle instead of a package-global bareword, so the
+ # handle cannot clash with any other open file in the program.
+ open (my $email_fh, "<", "$package_config_base/$package_name/email")
+ || &fatal("open $package_name email config failed: $!",0);
+
+ while (<$email_fh>) {
+ chomp;
+ push (@ret, $_)
+ if $_ =~ /^[[:graph:]]+@[[:graph:]]+$/; # simple sanity check
+ }
+
+ close ($email_fh) || ftp_warn("close($package_config_base/$package_name/email) failed: $!");
+ return @ret;
+}
+
+
+\f
+# Return the information for this upload out of DIRECTIVE_FILE --
+# directory and package. Make sure the key that signed the directive
+# file has permission to write to this package, too.
+#
+# We assume DIRECTIVE_FILE is clear-signed (gpg --clearsign). Among
+# other things, this lets us use gpgv everywhere, for paranoia's sake.
+#
+# Parse DIRECTIVE_FILE (a gpg --clearsigned text) into the global %info,
+# validating every directive and enforcing upload policy: keyring match,
+# filename/version checks, and replay protection via signing timestamps.
+# Aborts the current triplet (via fatal) on any violation.
+# Arguments: directive file name, uploaded file name ('' when only a
+# directive file was uploaded), and the directive-only flag.
+# Returns %info (also left populated in the global for the caller).
+sub read_directive_file
+{
+ my ($directive_file) = shift;
+ my ($uploaded_file) = shift;
+ my ($directive_only) = shift;
+
+ # We default to v1.1
+ $info{'v1_compat_mode'} = 0;
+
+ # Read the contents of the directive file. We require one
+ # non-white non-pgp line:
+ # Directory: dirname[/subdirname]
+ #
+ # We don't handle deletions if the wrong thing is uploaded.
+ # That'll take manual intervention (i.e., email to maintainers@gnu.org).
+ open (DIRECTIVE_FILE, "<", $directive_file)
+ || ftp_die("FATAL: open($directive_file) failed: $!");
+
+ my $signed = 0;
+ # If there is a command in the directive that doesn't require an actual file to work
+ # on, we won't require the filename line in the directive file. This will allow people
+ # to upload a directive file only to archive/create symlinks/remove symlinks
+ my $filename_required = 1;
+
+ # For debugging purposes, see below
+ my $directive_file_contents = '';
+
+ my $cnt = 0; # Keep track of the order of directives...
+ while (<DIRECTIVE_FILE>) {
+ my $line = $_;
+ $line =~ s/\r\n/\n/g; # deal with dos-based line endings...
+ $line =~ s/\s+$/\n/; # Some people like to put spaces after their commands
+ $line =~ s/^\s+//; # Or even *before* their commands
+ last if ($line =~ /^-----BEGIN PGP SIGNATURE/);
+ if ($line =~ /^-----BEGIN PGP SIGNED MESSAGE-----$/) {
+ $signed = 1;
+ next;
+ }
+ next if ($line =~ /^Hash:/);
+ next if ($line =~ /^\s*$/);
+ # Just make sure we don't parse any lines that are NOT part of the signed message!
+ # GPG will make sure that a line that looks like "-----BEGIN PGP SIGNED MESSAGE-----"
+ # will be escaped before signing a message that contains it
+ next if (!$signed);
+
+ $directive_file_contents .= $line;
+
+ my ($tainted_cmd,$tainted_val) = split(' ',$line,2);
+ if ($tainted_cmd =~ /^Directory:?$/i) { # case-insensitive, w or w/o the :
+ # Can't let it start with - . / or contain strange characters.
+ # This disallows .. as a file name component since no component
+ # can start with a . at all.
+ $tainted_val =~ m,^(\w[-.\w]*(/\w[-.\w]*)*)$, || &fatal("invalid directory $tainted_val",1);
+ my $val = $1; # so far so good
+
+ # A couple of subdir levels are ok, but don't allow hundreds.
+ my $slash_count = ($val =~ tr,/,/,);
+ &fatal("$slash_count /'s is too many, in $val",1) if $slash_count > 3;
+
+ # Only let them specify one directory directive.
+ &fatal("invalid second directory $val, have $info{directory}",1)
+ if exists $info{"directory"};
+
+ $info{"directory"} = $val; # ok.
+ ($info{"package"} = $val) =~ s,/.*$,,; # top-level name, no subdir
+ # Set email addresses
+ push (@{$info{email}}, email_addresses ($info{package}));
+ } elsif ($tainted_cmd =~ /^Filename:?$/i) { # case-insensitive, w or w/o the :
+ # We use the same filename restrictions as scan_incoming
+ $tainted_val =~ /^([\w_\+][-.\w_\+]*)$/ || &fatal("invalid filename $tainted_val",1);
+ my $val = $1; # so far so good
+
+ # Only let them specify one filename directive.
+ &fatal("invalid second filename $val, have $info{filename}{value}",1)
+ if exists $info{"filename"};
+
+ $info{"filename"} = {"value" => $val, "order" => $cnt++}; # ok.
+ } elsif ($tainted_cmd =~ /^Version:?$/i) { # case-insensitive, w or w/o the :
+ $tainted_val =~ /^(\d+\.\d+)$/ || &fatal("invalid version $tainted_val",1);
+ my $val = $1; # so far so good
+
+ # We only support version 1.1 right now!
+ &fatal("invalid version $val, not supported",1) if ($val ne '1.1');
+
+ # Only let them specify one version directive.
+ &fatal("invalid second version $val, have $info{version}",1) if exists $info{"version"};
+
+ $info{"version"} = $val; #ok.
+ } elsif ($tainted_cmd =~ /^symlink:?$/i) { # case-insensitive, w or w/o the :
+ $tainted_val =~ /^([\w_\+][-.\w_\+\/]*)\s+([\w_\+][-.\w_\+\/]*)$/ || &fatal("invalid parameters for symlink command: $tainted_val",1);
+ my ($target,$link) = ($1,$2); # so far so good
+ &fatal("invalid parameters for symlink command(2): $tainted_val",1) if ($target =~ /\.\./);
+ $info{"symlink-$target"} = {"link" => $link, "order" => $cnt++}; #ok.
+ } elsif ($tainted_cmd =~ /^rmsymlink:?$/i) { # case-insensitive, w or w/o the :
+ $tainted_val =~ /^([\w_\+][-.\w_\+\/]*)$/ || &fatal("invalid parameters for rmsymlink command: $tainted_val",1);
+ my $val = $1; # so far so good
+ &fatal("invalid parameters for rmsymlink command(2): $tainted_val",1) if ($val =~ /\.\./);
+ $info{"rmsymlink-$val"} = {"order" => $cnt++}; #ok.
+ } elsif ($tainted_cmd =~ /^archive:?$/i) { # case-insensitive, w or w/o the :
+ $tainted_val =~ /^([\w_\+][-.\w_\+\/]*)$/ || &fatal("invalid parameters for archive command: $tainted_val",1);
+ my $val = $1; # so far so good
+ &fatal("invalid parameters for archive command(2): $tainted_val",1) if ($val =~ /\.\./);
+
+ $info{"archive-$val"} = {"order" => $cnt++}; #ok.
+ } elsif ($tainted_cmd =~ /^comment:?$/i) { # case-insensitive, w or w/o the :
+ # Comments are ok, we ignore them
+ } else {
+ &fatal("unrecognized directive $_",1);
+ }
+ }
+ close (DIRECTIVE_FILE) || ftp_warn("close($directive_file) failed: $!");
+
+ # Phone home. E-mail the contents of the directive file to the maintainer, for
+ # debugging purposes.
+ debug($directive_file_contents) if $DEBUG;
+
+
+ # They have to specify a directory.
+ &fatal("no directory directive specified in $directive_file",1)
+ if ! $info{"directory"};
+
+
+ # There are a few possibilities regarding the 'filename' directive
+ # 1. It exists in the directive file - there is no problem
+ # 2. It doesn't exist in the directive file
+ # In that case, we need to double check a few things.
+ # This is permitted IF $V1_COMPAT_ALLOWED is true, AND if the only directive is a 'directory'.
+ # (for backwards compatibility with older versions of the script)
+ # It is also permitted if the directive file contains commands that don't require
+ # a filename - currently symlink, rmsymlink, and archive - and only the directive file was
+ # uploaded
+ if (!exists($info{filename})) {
+ my $directory_command_only = 1;
+ foreach (keys %info) {
+ $directory_command_only = 0 if (($_ ne 'directory') && ($_ ne 'package') && ($_ ne 'version') && ($_ ne 'v1_compat_mode') && ($_ ne 'email'));
+ }
+ # This is where we would check for commands that require a file.
+ # In this version (1.1), there are none, so all we do is check
+ # that only the directive file was uploaded
+
+ if (!$directive_only) {
+ # We have three files
+ # Are we in version 1.0 compatibility mode?
+ if ($V1_COMPAT_ALLOWED) {
+ # We're in backwards compatibility mode
+ # That means: three files, and ONLY a directory directive in the directive file
+ $info{'v1_compat_mode'} = 1;
+ if ($directory_command_only == 0) {
+ &fatal("no filename directive specified in $directive_file",1)
+ } else {
+ ftp_syslog('info',"($log_style) running in legacy V1 compatibility mode");
+ }
+ } elsif (!$V1_COMPAT_ALLOWED) {
+ # This is not allowed - we require a filename directive. No backwards compatibility.
+ &fatal("no filename directive specified in $directive_file. Upgrade to v1.1! See http://www.gnu.org/prep/maintain/maintain.html",1)
+ }
+ } else {
+ # We only have a directive file
+ # Do we have something to do?
+ &fatal("nothing to do - no commands in directive file",1) if ($directory_command_only == 1);
+ }
+ $filename_required = 0;
+ }
+ ftp_syslog('info',"($log_style) running in v1.1 mode") if (!$info{'v1_compat_mode'});
+
+ # Configuration must exist for the package
+ -d $package_config_base . '/' . $info{"package"}
+ || &fatal("no configuration directory for package $info{package}",0);
+
+ # Check that we have a keyring for this package:
+ my @keyrings = &keyring_file ($info{package},$info{directory});
+ &fatal("no keyring for package $info{package}",0) if ($#keyrings < 0);
+
+ if ($filename_required) {
+ # Ben Pfaff <blp@cs.stanford.edu> wrote:
+ # First, `gpg -b' doesn't verify that the filename of the signed
+ # data is correct. This means that I can rename gcc-1.2.3.tar.gz
+ # to gcc-3.4.5.tar.gz and the signature will still verify
+ # correctly. This opens up the possibility for confusion, but in
+ # itself it's not a huge deal.
+ #
+ # To fix this, we require a 'filename:' line in the directive file
+ # that needs to match the name of the uploaded file.
+
+ # Filename has to match the name of the uploaded file
+ &fatal("filename ($info{filename}{value}) does not match name of the uploaded file ($uploaded_file)",1)
+ if ($uploaded_file ne $info{filename}{value});
+
+ # Filename has to match the name of this directive file (a bit paranoid, but hey...)
+ &fatal("filename $info{filename}{value} does not match name of directive file $directive_file",1)
+ if ($directive_file ne "$info{filename}{value}.directive.asc");
+ }
+
+ # They have to specify a version unless we're in 1.0 compatibility mode
+ if (!$info{'v1_compat_mode'}) {
+ &fatal("no version directive specified in $directive_file",1)
+ if ! $info{"version"};
+ }
+
+ my $retval = &verify_keyring($directive_file,@keyrings);
+
+ # Now check that the timestamp of signing for the directive is not older
+ # than the one for the last file that was uploaded
+ # This is only relevant when a 'filename' directive is present, hence the
+ # test of the $filename_required variable.
+ # WHY IS THIS ONLY RELEVANT WHEN WE HAVE A 'filename' DIRECTIVE? SHOULD WE
+ # NOT ALWAYS CHECK THIS? WVW, 2006-04-07
+ if (($retval =~ /Signature made (.*?) using/) && ($filename_required)) {
+ my $timestr = $1;
+ # If the time/date string starts with a weekday (e.g. "Wed Apr 28 16:40:03 2004 EDT"),
+ # chop off the weekday - Date::Manip doesn't like it
+ $timestr =~ s/^[a-z]+? ([a-z]+)/$1/i;
+
+ # We need to convert time/date strings like "Apr 28 16:40:03 2004 EDT" into
+ # "Apr 28 2004 16:40:03 EDT" for Date::Manip to understand them...
+ $timestr =~ s/^([a-z]+? +\d{1,2}) (\d{2}:\d{2}:\d{2}) (\d{4}) (.*)$/$1 $3 $2 $4/i;
+
+ my $date = ParseDate($timestr);
+ my $epoch = UnixDate($date,"%s");
+
+ # Verify that this timestamp is not too far in the future. We allow a discrepancy of 1 day so we don't have to worry about timezones
+ my $now = time();
+ if ($epoch > ($now + 24*3600)) {
+ &fatal("GPG signed upload from the future - not allowed. Please make sure your clock is set correctly, resign the directive file, and upload again. You may have to wait 24 hours before re-uploading if you do not change the filename for your triplet.",1);
+ }
+
+ # Now we need to flock our 'serials' file;
+ # verify the epoch value there/update it, etc.
+ my %serials = ();
+ my @serials = ();
+
+ if (!-e $serials_path) {
+ # NOTE(review): 2-arg open with a bareword handle and an unchecked
+ # result; $serials_path is a fixed internal path so there is no
+ # injection risk, but an open failure here would be silent.
+ open(SERIALS,">$serials_path");
+ flock(SERIALS,2); # Take exclusive lock
+ } else {
+ open(SERIALS,"+<$serials_path");
+ flock(SERIALS,2); # Take exclusive lock
+ @serials = <SERIALS>;
+ foreach (@serials) {
+ my ($tmp1,$tmp2) = /(.*?):(.*?)\n/;
+ $tmp1 =~ s/\s+//g;
+ $tmp2 =~ s/\s+//g;
+ $serials{$tmp1} = $tmp2;
+ }
+ }
+ seek(SERIALS,0,0);
+ my $full_filename = $info{"directory"} . '/' . $uploaded_file;
+ $full_filename =~ s/\/\//\//g; # Just in case...
+
+ # Verify that this is really a new version of the file!
+ if (exists($serials{$full_filename}) && ($serials{$full_filename} >= $epoch)) {
+ flock(SERIALS,4); # Release lock
+ &fatal("Gpg signed upload older than/same timestamp as existing version - not allowed. In other words, the filenames for the triplet you have uploaded are an exact match for a triplet that has been uploaded in the past, and the directive file that you just uploaded has been signed before or at the same time as the directive file for the triplet that was uploaded earlier. Most likely, you are re-uploading an old triplet.",1);
+ }
+
+ $serials{$full_filename} = $epoch;
+
+ foreach my $key (keys %serials) {
+ print SERIALS "$key:$serials{$key}\n";
+ }
+
+ flock(SERIALS,4); # Release lock
+ close(SERIALS);
+ } elsif ($filename_required) {
+ &fatal("gpg verification problem: could not extract timestamp",1);
+ }
+
+ return %info;
+}
+
+\f
+#
+# Verify that the signature used for the directive file is valid for
+# this package's keyring. We go through all keyring files, starting at the
+# sub-most directory, until we find one that matches (or not!)
+#
+# Verify the clearsigned DIRECTIVE_FILE against each keyring in turn
+# (most specific first). Returns gpgv's combined stdout/stderr output
+# - which includes the "Signature made ..." line the caller parses for
+# the signing timestamp - as soon as one keyring verifies; otherwise
+# aborts the triplet via fatal().
+sub verify_keyring {
+ my ($directive_file, @keyrings) = @_;
+ foreach (@keyrings) {
+ # We need what gpgv writes to STDERR to determine the timestamp
+ # Hence the silly trick with storing the return code of gpgv in
+ # the command output
+ # NOTE(review): unlike the other gpgv invocations, this one goes
+ # through the shell (backticks) because of the "2>&1 ; echo $?"
+ # trick. $directive_file has passed scan_incoming's filename
+ # whitelist, so no shell metacharacters can reach it - but confirm
+ # that before reusing this pattern elsewhere.
+ my @verify_args = ("/usr/bin/gpgv", "--keyring", $_,
+ $directive_file,"2>&1",";echo \$?");
+
+ my $verify_str = join(' ',@verify_args);
+
+ my $retval = `$verify_str`;
+ if (!defined($retval)) {
+ # This is bad - we couldn't even execute the gpgv command properly
+ &fatal("gpg verify of directive file failed: $!",1);
+ } elsif ($retval =~ /\n0\n$/s) { # We store the return value of gpgv on the last line of the output
+ ftp_syslog('info', "($log_style) verified against $_\n");
+ return $retval; # We got return value 0 from gpgv -> key verified!
+ }
+ }
+ &fatal("gpg verify of directive file failed",1);
+}
+
+\f
+# Before checking the files, move them to a temporary directory.
+#
+# Check that the key is on the keyring for this package, and that
+# SIG_FILE and UPLOAD_FILE are good.
+#
+# Verify the detached signature of the uploaded file against every
+# keyring that applies to the package, most specific first. Returns
+# quietly as soon as one keyring validates the signature; aborts the
+# triplet via fatal() if none does (or if no keyring exists at all).
+sub check_files {
+    my $files = shift;
+    my %info = @_;
+
+    my $sig_file = $files->{"sig"};
+    my $upload_file = $files->{"upload"};
+
+    my @keyrings = &keyring_file ($info{package},$info{directory});
+    &fatal("no keyring for package $info{package}",0) if ($#keyrings < 0);
+
+    for my $keyring (@keyrings) {
+        # gpgv exits 0 on a good signature; list-form system() keeps the
+        # shell out of the picture entirely.
+        return unless system ("/usr/bin/gpgv", "--keyring", $keyring,
+                              $sig_file, $upload_file);
+    }
+    &fatal("gpg verify of upload file ($upload_file) failed",1);
+}
+
+
+\f
+# Install both SIG_FILE and UPLOAD_FILE in $destfinal/$info{directory}.
+# Make the directory if it doesn't exist (for, e.g., a new gcc/x.y.z
+# subdir). When the destination file exists, archive it automatically first.
+#
# Install both the signature and upload file into $destfinal/$info{directory},
# creating the directory if needed.  Any pre-existing destination file is
# archived first, then overwritten; the uploader is notified by mail.
# Files are staged in $desttmp and installed via rename() so mirrors never
# see a partially-written file.
sub install_files {
  my $files = shift;
  my %info = @_;

  my ($sig_file,$upload_file) = ($files->{"sig"}, $files->{"upload"});
  my $destdir = "$destfinal/$info{directory}";
  # File::Path is 200 lines of Perl and requires reading an external
  # text file.  In my mind, it is a toss-up as to whether that or
  # forking the system mkdir is safer.  We could debate endlessly,
  # change it if you like, let's move on ...
  my @mkdir_args = ("/bin/mkdir", "-p", $destdir);
  system (@mkdir_args);
  -d $destdir || &fatal("no directory $destdir",1);

  # Remember which destination files already existed (and were therefore
  # archived before being replaced) so we can notify the uploader below.
  my ($archived_sig, $archived_upload) = (0,0);

  # We now allow overwriting of files - without warning!!
  if (-e "$destdir/$sig_file") {
    archive($destdir, $info{directory}, $sig_file);
    ftp_syslog('info', "($log_style) archived and overwrote $destdir/$sig_file with uploaded version");
    $archived_sig = 1;
  }
  if (-e "$destdir/$upload_file") {
    archive($destdir, $info{directory}, $upload_file);
    # The file was archived just above, so log "archived and overwrote"
    # (this used to say only "overwrote", unlike the sig-file log line
    # and the notification mail text).
    ftp_syslog('info', "($log_style) archived and overwrote $destdir/$upload_file with uploaded version");
    $archived_upload = 1;
  }
  my $notification_str = '';
  $notification_str .= "Archived and overwrote $destdir/$sig_file with uploaded version\n" if ($archived_sig);
  $notification_str .= "Archived and overwrote $destdir/$upload_file with uploaded version\n" if ($archived_upload);
  &mail ($notification_str) if ($notification_str ne '');

  # Do we need a subdirectory on $desttmp as well?  Can't quite picture
  # when we'd have a collision, so skip that for now.
  #
  for my $f (($sig_file, $upload_file)) {
    my @mv_args = ("/bin/mv", $f, "$desttmp/$f");
    &fatal("@mv_args failed",0) if system (@mv_args) != 0;
  }

  # Do atomic rename (if the system crashes between or during the mv's,
  # too bad :).  This is so we don't ever have a partial file that could
  # be found by mirrors, etc.
  #
  for my $f (($sig_file, $upload_file)) {
    chmod 0644, "$desttmp/$f";
    rename ("$desttmp/$f", "$destdir/$f")
      || &fatal("rename($desttmp/$f, $destdir/$f) failed: $!",0);
  }
}
+
+
+\f
+# Report success and unlink the directive file.
+#
# Report a completed upload by mail, then remove the directive file,
# which has now served its purpose.
sub success_upload {
  my ($sig_file, $upload_file, $directive_file) = @_;

  &mail ("upload of $upload_file and $sig_file complete");

  ftp_warn("unlink($directive_file) failed: $!")
    unless unlink ($directive_file);
}
+
# Report a completed directive-only run by mail, then remove the
# directive file.
sub success_directive {
  my $directive_file = shift;

  &mail ("processing of $directive_file complete");

  ftp_warn("unlink($directive_file) failed: $!")
    unless unlink ($directive_file);
}
+
# Walk DIR and, for every plain non-dotted file older than 24 hours,
# replace any previous ".file" backup and rename the file to ".file"
# so it is hidden from further processing.
sub cleanup_dir {
  my $dir = shift;
  opendir(DIR, $dir) || ftp_die("Can’t opendir $dir in cleanup_dir: $!");
  my @files = grep { ! /^\./ && -f "$dir/$_" } readdir(DIR);
  closedir DIR;

  foreach my $file (@files) {
    my @tmp = stat("$dir/$file");
    # Untaint the filename and mtime (this script runs under -T).
    $file =~ /^(.*)$/; $file = $1;
    my $mtime = $tmp[9];
    $mtime =~ /^(.*)$/; $mtime = $1;

    # Only touch files older than 24 hours.  Previously the old ".file"
    # backup was unlinked unconditionally, so a *fresh* file's existing
    # backup was deleted without being replaced by the rename below.
    next unless time() > ($mtime + 24*3600);

    ftp_syslog('debug',"($log_style) DEBUG: Removing $file, older than 24 hours (mtime: $tmp[9])\n") if ($DEBUG > 0);
    unlink ("$dir/.$file"); # don't worry if it doesn't exist
    rename ("$dir/$file", "$dir/.$file");
  }
}
+
# For each filename given, stash a hidden ".name" backup copy in every
# working directory (incoming, incoming-tmp, dest-tmp), replacing any
# earlier backup.  Missing files are silently skipped.
sub cleanup {
  my @names = @_;

  foreach my $dir ($incoming_dir, $incoming_tmp, $desttmp) {
    foreach my $f (@names) {
      ftp_syslog('debug',"($log_style) DEBUG: cleaning up $dir/$f\n") if ($DEBUG > 1);
      # if we quit early enough, they might not be there.
      next unless defined $f && -e "$dir/$f";

      unlink ("$dir/.$f");            # don't worry if it doesn't exist
      rename ("$dir/$f", "$dir/.$f"); # save one backup
    }
  }
}
+
+\f
+# Send email with TAINTED_MSG to the ftp maintainers, as well as any
+# address specified for the package. Rename the bad files with a
+# leading . so we don't try to process them again. Finally, write the
+# same MSG to stderr and exit badly.
+#
+# It's ok that we quit here without processing every file, because we'll
+# be invoked again from cron in a few minutes and will look further then.
+# The bad . files will eventually get cleaned up via a separate script.
+#
# Report TAINTED_MSG (sanitized) by mail and syslog, then die.
# SEND_TO_USER controls whether the uploader is also mailed.
sub fatal {
  my $tainted_msg = shift;
  my $send_to_user = shift;
  print STDERR "$tainted_msg\n";

  # Don't let them do perl or shell quoting tricks, but show everything
  # that's definitely harmless.  The match below also untaints the
  # message (this script runs under -T).
  #
  $tainted_msg =~ s=[^-.:,/@\w\s]==g;
  # Guard against a failed match: if the stripped message is empty, $1
  # would otherwise still hold the result of an earlier, unrelated
  # pattern match and we would mail/log stale text.
  my $msg = '';
  $msg = $1 if $tainted_msg =~ m=^([-.:,/@\w\s]+)$=;

  &mail ($msg,$send_to_user);

  ftp_syslog('err', "($log_style) $msg");

  # Use the core Cwd module rather than forking /bin/pwd just to learn
  # the working directory for the final log message.
  require Cwd;
  my $cwd = Cwd::getcwd();
  $cwd = '(unknown)' unless defined $cwd;

  ftp_die("(in $cwd) $msg");
}
+
+# Used for both success and failure.
+#
# Deliver MSG via the local SMTP server.  Used for both success and
# failure reports.  The maintainers always get a copy; the package's
# own contact addresses are added only when SEND_TO_USER is true.
sub mail {
  my ($msg, $send_to_user) = @_;

  my @email_list = ($email_always);
  # Some messages should be sent to the user, some should not
  push (@email_list, @{$info{email}})
    if (defined $info{email} && $send_to_user);

  my $smtp = Net::SMTP->new ("127.0.0.1");
  ftp_die("FATAL: SMTP connection failed") unless $smtp;
  $smtp->mail ("ftp-upload-script\@gnu.org");
  $smtp->recipient (@email_list, { SkipBad => 1});

  $smtp->data ();
  $smtp->datasend ("To: " . join (", ", @email_list) . "\n");
  $smtp->datasend ("From: ftp-upload-script\@gnu.org\n");
  $smtp->datasend ("Reply-To: ftp-upload\@gnu.org\n");
  # Subject and syslog priority depend on whether we know which package
  # this report concerns.
  if (defined $info{package}) {
    $smtp->datasend ("Subject: [gnu-ftp-upload] $info{package}");
    ftp_syslog('info', "($log_style) $info{package}: $msg");
  } else {
    $smtp->datasend ("Subject: [gnu-ftp-upload] generic failure");
    ftp_syslog('warning', "($log_style) Error uploading package: $msg");
  }
  $smtp->datasend ("\n\n");
  $smtp->datasend ("$msg\n");
  $smtp->dataend ();

  $smtp->quit ();
}
+
# Mail MSG to the maintainer address only, via the local SMTP server.
# Used for debug traces of processed uploads.
sub debug {
  my ($msg) = @_;

  my $smtp = Net::SMTP->new ("127.0.0.1");
  ftp_die("FATAL: SMTP connection failed") unless $smtp;
  $smtp->mail ("ftp-upload-script\@gnu.org");
  $smtp->recipient ($maintainer_email, { SkipBad => 1});

  $smtp->data ();
  foreach my $header ("To: $maintainer_email\n",
                      "From: ftp-upload-script\@gnu.org\n",
                      "Reply-To: ftp-upload\@gnu.org\n",
                      "Subject: [$m_style gnu-ftp-debug] new upload processed") {
    $smtp->datasend ($header);
  }
  $smtp->datasend ("\n\n");
  $smtp->datasend ("$msg\n");
  $smtp->dataend ();
  $smtp->quit ();
}
+
# Record a warning in syslog, then emit the usual perl warning.
sub ftp_warn($) {
  my ($msg) = @_;
  ftp_syslog('warning', "($log_style) $msg");
  warn $msg;
}
+
# Log a fatal condition to syslog and terminate with a failure status.
# (Note: unlike die(), nothing is printed to stderr here.)
sub ftp_die($) {
  my ($msg) = @_;
  ftp_syslog('err', "($log_style) $msg");
  exit 1;
}
+
# Thin wrapper around syslog().  The syslog function is picky and can
# die (sometimes silently) on invalid priorities, so we trap any error
# with eval and report it on STDERR instead of killing the script.
sub ftp_syslog {
  my ($priority, $message) = @_;
  eval { syslog($priority, $message); };
  print STDERR "$@\n" if $@;
}
+
+# Local Variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "Version "
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-end: "$"
+# End:
+