From 6a5a03b1de3e79aba68d909b4284129decc6c419 Mon Sep 17 00:00:00 2001 From: unknown Date: Tue, 26 Jan 2010 10:20:08 -0600 Subject: [PATCH] Import version as of 2010-01-26 for upload-ftp-v1.1.pl --- upload-ftp-v1.1.pl | 201 +++++++++++++++++++++++++++------------------ 1 file changed, 119 insertions(+), 82 deletions(-) diff --git a/upload-ftp-v1.1.pl b/upload-ftp-v1.1.pl index 6a33599..23a0c7c 100755 --- a/upload-ftp-v1.1.pl +++ b/upload-ftp-v1.1.pl @@ -14,7 +14,7 @@ # We expect to find (1) a directive file, specifying the directory to # store into (syntax details later); (2) the actual file to be # distributed (no signature); (3) a detached signature for (2). -# +# # For instance: # foo-1.2.tar.gz.directive.asc, signed (gpg --clearsign) ASCII text # about what to do with the other two files. @@ -26,8 +26,8 @@ # is set), in which case directive files with only a 'directory' directive are # allowed. # -# Alternatively, we expect to find only a directive file. This file must have -# a name that ends in 'directive.asc', and must contain one or more of these +# Alternatively, we expect to find only a directive file. This file must have +# a name that ends in 'directive.asc', and must contain one or more of these # directives: 'symlink', 'rmsymlink' and 'archive', in addition to the obligatory # 'directory' and 'version' directives. A 'filename' directive is not allowed. # @@ -57,7 +57,7 @@ # file and file to be uploaded, must be signed with a key from # the package's keyring. # If a file is to be uploaded into a subdirectory of the package -# directory, the subdirectory in the package config directory will +# directory, the subdirectory in the package config directory will # be checked for a pubring.gpg first, going up to the parent directory # until a match is found. # 2. $package_config_base/$package_name/email @@ -67,11 +67,11 @@ # This is written for use with ftp instead of as a cgi script because we # don't want to run an httpd on ftp.gnu.org. 
In general, it tries to do # the minimum possible. -# -# We execute gpgv, lsof, mkdir, mv, and pwd. Executions are such that +# +# We execute gpgv, lsof, mkdir, mv, pwd and tar. Executions are such that # it's not possible for the shell to be invoked. We make use of Perl # module Net::SMTP to send email. -# +# # Originally written by Karl Berry (karl@gnu.org), October 2003. # Additional changes by Paul Fisher (rao@gnu.org), November 2003 # Additional functionality (v1.1) by Ward Vandewege (ward@gnu.org), May 2004 @@ -127,10 +127,10 @@ my $incoming_dir = "/home/upload/incoming/$m_style"; # private dir on SAME FILESYSTEM as $incoming_dir: my $incoming_tmp = "/var/tmp/$m_style-in"; # top-level public ftp dir for installing files: -my $destfinal = "/home/$m_style/gnu"; +my $destfinal = "/home/$m_style/gnu"; $destfinal = "/home/ftp/$m_style" if ($m_style eq 'gnu+linux-distros'); # The distros go here # private dir on SAME FILESYSTEM as $destfinal: -my $olddestfinal = "/home/gatekpr/$m_style-archived"; +my $olddestfinal = "/home/gatekpr/$m_style-archived"; # private dir on SAME FILESYSTEM as $destfinal: my $desttmp = "/var/tmp/$m_style-out"; @@ -161,23 +161,23 @@ sub main setlogsock('unix'); openlog("ftp-upload", 'pid', $facility); ftp_syslog('info', "($log_style) Beginning upload processing run."); - + # make sure our directories all exist, or it's hopeless. # Use die instead of fatal - this error should "never" happen. for my $dir ($package_config_base, $incoming_dir, $incoming_tmp, $destfinal, $desttmp) { -d $dir || ftp_die("FATAL: configuration problem, $dir is not a directory"); } - + # the chdir simplifies our filename parsing, so the base names don't # have any directory. chdir ($incoming_dir) || ftp_die("FATAL: chdir($incoming_dir) failed: $!"); my @incoming = &scan_incoming (); - + # we've moved the files to work on to a new directory. 
chdir ($incoming_tmp) || ftp_die("FATAL: chdir($incoming_tmp) failed: $!"); - + for my $files (@incoming) { # each list element is a hash reference. ftp_syslog('info',"($log_style) found directive: $files->{directive}\n"); # if we die processing a triplet, the eval allows us to move @@ -185,14 +185,14 @@ sub main eval { # set up the %info variable my $retval = &read_directive_file ($files->{"directive"},$files->{"upload"},$files->{"directive_only"}); - + if ($retval != -1) { # do the work &execute_commands($files,%info); # report success if (!$files->{"directive_only"}) { - &success_upload($files->{"sig"}, $files->{"upload"},$files->{"directive"}); + &success_upload($files->{"sig"}, $files->{"upload"},$files->{"directive"}); } else { &success_directive($files->{directive}); } @@ -213,7 +213,7 @@ sub main # Clean up the incoming directory and the incoming tmp directory - remove files older than a day cleanup_dir($incoming_dir); cleanup_dir($incoming_tmp); - + return 0; } @@ -265,15 +265,15 @@ sub archive { # Actual executing of commands. Respects the cronological order -# they were specified in, thanks to the 'order' value in the %info +# they were specified in, thanks to the 'order' value in the %info # hash sub execute_commands { my $files = shift; my %info = @_; - # This is ugly but necessary. + # This is ugly but necessary. # Delete all info entries that are NOT hashes with an 'order' value - # (and hence would mess up the foreach loop below). Make a backup of + # (and hence would mess up the foreach loop below). 
Make a backup of # the hash first so we can feed the real thing to check_files & install_files my %originfo = %info; delete($info{directory}); @@ -293,7 +293,7 @@ sub execute_commands { my $cwd = `/bin/pwd`; $cwd =~ m,^(/[^\.\.]*)$, || &fatal("invalid directory $cwd",1,''); # Just make sure there are no double dots $cwd = $1; # so far so good, untainted - + chomp($cwd); # change to destination dir chdir($destdir); @@ -325,18 +325,18 @@ sub execute_commands { # described at the beginning). Ignore if we don't have all three files, # or if any of the files are still open, or if the filenames are dubious # -- things'll get cleaned up as needed separately. -# +# # If we accept a triplet, we rename the files into a temporary # directory. This is to avoid attackers overwriting files as or after # we check them. This is redundant protection -- the ftp config on # ftp.gnu.org does not allow overwrites or deletes. -# +# sub scan_incoming { my @ret; my %possible; # Get list of all possible files from incoming dir. - # + # opendir (INCOMING, $incoming_dir) || ftp_die("FATAL opendir($incoming_dir) failed: $!"); while (my $tainted_ent = readdir (INCOMING)) { @@ -344,7 +344,7 @@ sub scan_incoming { # subsequently. Omit files containing any other weird characters. next unless $tainted_ent =~ /^([\w_\+][-.\w_\+\~]*)$/; my $ent = $1; - + # Don't look at files with really long names, either. next if length ($ent) > 100; $possible{$ent} = 1; @@ -353,19 +353,19 @@ sub scan_incoming { # No possible files found, so return before we call lsof return @ret unless %possible; - + # Determine if any of those possible files are open. We find the # possible files before running lsof (partly) to avoid a race # condition. (If we ran lsof on the whole directory first, a new file # might be uploaded and possibly be processed even though it was open.) 
- # + # my %open; # # BTW, lsof -F0n mistakenly backslash-escapes newlines; fortunately, # we've already excluded filenames containing whitespace so this # cannot cause trouble. We use -F0n anyway, though, for redundant # protection against strange filenames. - # + # # We do have prepend $incoming_dir to make the possible names # absolute, since lsof outputs absolute names. # @@ -383,7 +383,7 @@ sub scan_incoming { } else { # child exec (@lsof_args) || ftp_die("FATAL: cannot exec lsof: $!"); } - + # For each remaining possibility, do some more checks for my $ent (keys %possible) { my $base = $ent; @@ -404,7 +404,7 @@ sub scan_incoming { # Do atomic rename to temp incoming directory before reading # anything, for safety. - # + # for my $f (($directive, $sig, $base)) { rename ($f, "$incoming_tmp/$f") || &fatal("rename $incoming_dir/$f to $incoming_tmp/$f failed: $!",0); @@ -464,7 +464,7 @@ sub scan_incoming { # Return array of public key files for PACKAGE_NAME. -# +# sub keyring_file { my ($package_name,$directory) = (shift,shift); my @directory = split(/\//,$directory); @@ -519,7 +519,7 @@ sub parse_directory_line { # can start with a . at all. $tainted_val =~ m,^(\w[-.\w]*(/\w[-.\w]*)*)$, || &fatal("invalid directory $tainted_val\n$directive_file_contents",1,$directive_file_contents); my $val = $1; # so far so good - + # A couple of subdir levels are ok, but don't allow hundreds. my $slash_count = ($val =~ tr,/,/,); &fatal("$slash_count slashes is too many, in $val",1,$directive_file_contents) if ($slash_count > 3 and not $do_not_fail); @@ -542,10 +542,10 @@ sub parse_directory_line { # Return the information for this upload out of DIRECTIVE_FILE -- # directory and package. Make sure the key that signed the directive # file has permission to write to this package, too. -# +# # We assume DIRECTIVE_FILE is clear-signed (gpg --clearsign). Among # other things, this lets us use gpgv everywhere, for paranoia's sake. 
-# +# sub read_directive_file { my ($directive_file) = shift; my ($uploaded_file) = shift; @@ -553,7 +553,7 @@ sub read_directive_file { # We default to v1.1 $info{'v1_compat_mode'} = 0; - + # For debugging purposes, see below my $directive_file_contents = ''; my @lines = (); @@ -561,8 +561,8 @@ # Read the contents of the directive file. We require one # non-white non-pgp line: # Directory: dirname[/subdirname] - # - open (DIRECTIVE_FILE, "<", $directive_file) + # + open (DIRECTIVE_FILE, "<", $directive_file) || ftp_die("FATAL: open($directive_file) failed: $!"); my $cnt = 0; # Keep track of the order of directives... while (<DIRECTIVE_FILE>) { @@ -586,7 +586,7 @@ my $tmp_retval = &verify_keyring($directive_file,$directive_file_contents,@tmp_keyrings); push(@{$info{email}},$1) if ($tmp_retval =~ /Good signature from .*?<(.*?)>/); - + my $signed = 0; # If there is a command in the directive that doesn't require an actual file to work # on, we won't require the filename line in the directive file. This will allow people @@ -676,8 +676,8 @@ # In that case, we need to double check a few things. # This is permitted IF $V1_COMPAT_ALLOWED is true, AND if the only directive is a 'directory'. # (for backwards compatibility with older versions of the script) - # It is also permitted if the directive file contains commands that don't require - # a filename - currently symlink, rmsymlink, and archive - and only the directive file was + # It is also permitted if the directive file contains commands that don't require + # a filename - currently symlink, rmsymlink, and archive - and only the directive file was # uploaded if (!exists($info{filename})) { my $directory_command_only = 1; @@ -731,7 +731,7 @@ # # To fix this, we require a 'filename:' line in the directive file # that needs to match the name of the uploaded file. 
- + # Filename has to match the name of the uploaded file &fatal("The filename directive does not match name of the uploaded file.\n\n Filename directive: $info{filename}{value}\n Uploaded file: $uploaded_file\n",1) if ($uploaded_file ne $info{filename}{value}); @@ -751,34 +751,34 @@ # Now check that the timestamp of signing for the directive is not older # than the one for the last file that was uploaded - # This is only relevant when a 'filename' directive is present, hence the - # test of the $filename_required variable. + # This is only relevant when a 'filename' directive is present, hence the + # test of the $filename_required variable. # WHY IS THIS ONLY RELEVANT WHEN WE HAVE A 'filename' DIRECTIVE? SHOULD WE # NOT ALWAYS CHECK THIS? WVW, 2006-04-07 if (($retval =~ /Signature made (.*?) using/) && ($filename_required)) { my $timestr = $1; # If the time/date string starts with a weekday (e.g. "Wed Apr 28 16:40:03 2004 EDT"), # chop off the weekday - Date::Manip doesn't like it - $timestr =~ s/^[a-z]+? ([a-z]+)/$1/i; + $timestr =~ s/^[a-z]+? ([a-z]+)/$1/i; - # We need to convert time/date strings like "Apr 28 16:40:03 2004 EDT" into + # We need to convert time/date strings like "Apr 28 16:40:03 2004 EDT" into # "Apr 28 16:40:03 2004 EDT" for Date::Manip to understand them... $timestr =~ s/^([a-z]+? +\d{1,2}) (\d{2}:\d{2}:\d{2}) (\d{4}) (.*)$/$1 $3 $2 $4/i; - + my $date = ParseDate($timestr); my $epoch = UnixDate($date,"%s"); # Verify that this timestamp is not too far in the future. We allow a discrepancy of 1 day so we don't have to worry about timezones my $now = time(); if ($epoch > ($now + 24*3600)) { - &fatal("GPG signed upload from the future - not allowed. 
Please make sure your clock is set correctly, resign the directive file, and upload again. You may have to wait 24 hours before re-uploading if you do not change the filename for your triplet.",1); } - # Now we need to flock the our 'serials' file; + # Now we need to flock the our 'serials' file; # verify the epoch value there/update it, etc. my %serials = (); my @serials = (); - + if (!-e $serials_path) { open(SERIALS,">$serials_path"); flock(SERIALS,2); # Take exclusive lock @@ -800,7 +800,7 @@ sub read_directive_file { # Verify that this is really a new version of the file! if (exists($serials{$full_filename}) && ($serials{$full_filename} >= $epoch)) { flock(SERIALS,4); # Release lock - &fatal("Gpg signed upload older than/same timestamp as existing version - not allowed. In other words, the filenames for the triplet you have uploaded are an exact match for a triplet that has been uploaded in the past, and the directive file that you just uploaded has been signed before or at the same time as the directive file for the triplet that was uploaded earlier. Most likely, you are re-uploading an old triplet.",1); + &fatal("Gpg signed upload older than/same timestamp as existing version - not allowed. In other words, the filenames for the triplet you have uploaded are an exact match for a triplet that has been uploaded in the past, and the directive file that you just uploaded has been signed before or at the same time as the directive file for the triplet that was uploaded earlier. Most likely, you are re-uploading an old triplet.",1); } $serials{$full_filename} = $epoch; @@ -826,7 +826,7 @@ sub guess_uploader_email { } -# +# # Verify that the signature used for the directive file is valid for # this package's keyring. We go through all keyring files, starting at the # sub-most directory, until we find one that matches (or not!) 
@@ -837,7 +837,7 @@ sub verify_keyring { # We need what gpgv writes to STDERR to determine the timestamp # Hence the silly trick with storing the return code of gpgv in # the command output - my @verify_args = ("/usr/bin/gpgv", "--keyring", $_, + my @verify_args = ("/usr/bin/gpgv", "--keyring", $_, $directive_file,"2>&1",";echo \$?"); my $verify_str = join(' ',@verify_args); @@ -864,26 +864,64 @@ sub verify_keyring { # Before checking the files, move them to a temporary directory. -# +# # Check that the key is on the keyring for this package, and that # SIG_FILE and UPLOAD_FILE are good. -# +# sub check_files { my $files = shift; my %info = @_; my ($sig_file,$upload_file) = ($files->{"sig"}, $files->{"upload"}); - + my @keyrings = &keyring_file ($info{package},$info{directory}); &fatal("no keyring for package $info{package}",0) if ($#keyrings < 0); - foreach (@keyrings) { - # Verify that the file has been correctly signed with a valid signature. - my @verify_args = ("/usr/bin/gpgv", "--keyring", $_, - $sig_file, $upload_file); - return if (!system (@verify_args)); - } - &fatal("gpg verify of upload file ($upload_file) failed",1); + my $valid = 0; + foreach my $keyring (@keyrings) { + # Verify that the file has been correctly signed with a valid signature. + my @verify_args = ("/usr/bin/gpgv", "--keyring", $keyring, + $sig_file, $upload_file); + if (!system (@verify_args)) { + $valid = 1; + last; + } + } + $valid + or &fatal("gpg verify of upload file ($upload_file) failed",1); + + # Reject an upload tarball if it contains a Makefile.in vulnerable + # as described in CVE-2009-4029. 
+ # http://thread.gmane.org/gmane.comp.sysutils.autotools.announce/131 + if ($upload_file =~ /\.(tar|)(\.|$)|\.t[bglx]z|\.tbz2$/) { + # First check if the file contains any Makefile.in files + my $tar_cmd = "/bin/tar -tf $upload_file"; + open (TAR, "$tar_cmd|") + or &fatal("failed to run command: $tar_cmd",1); + my $found_makefile = 0; + while (defined (my $line = <TAR>)) { + if ($line =~ /Makefile.in/i) { + $found_makefile = 1; + last; + } + } + return if (!$found_makefile); + # If it does, check inside them + $tar_cmd = "/bin/tar --to-stdout -x -f $upload_file --wildcards '*/Makefile.in'"; + open (TAR, "$tar_cmd|") + or &fatal("failed to run command: $tar_cmd",1); + my $found_bad = 0; + while (defined (my $line = <TAR>)) { + $line =~ /-perm -777 -exec chmod/ + and $found_bad = 1; + } + close TAR + or &fatal("failed to close pipe to '$tar_cmd'",1); + $found_bad + and &fatal("upload rejected: $upload_file contains a vulnerable " + . "Makefile.in (CVE-2009-4029);\n" + . "Regenerate it with automake 1.11.1 or newer.",1,'',3); + } } @@ -891,7 +929,7 @@ sub check_files { # Install both SIG_FILE and UPLOAD_FILE in $destfinal/$info{directory}. # Make the directory if it doesn't exist (for, e.g., a new gcc/x.y.z # subdir). When the destination file exists, archive it automatically first. -# +# sub install_files { my $files = shift; my %info = @_; @@ -907,26 +945,26 @@ sub install_files { -d $destdir || &fatal("no directory $destdir",1); my ($t1, $t2) = (0,0); - - # We now allow overwriting of files - without warning!! + + # We now allow overwriting of files - without warning!! 
if (-e "$destdir/$sig_file") { archive($destdir, $info{directory}, $sig_file); ftp_syslog('info', "($log_style) archived and overwrote $destdir/$sig_file with uploaded version"); $t1 = 1; - } + } if (-e "$destdir/$upload_file") { archive($destdir, $info{directory}, $upload_file); ftp_syslog('info', "($log_style) overwrote $destdir/$upload_file with uploaded version"); $t2 = 1; - } + } my $notification_str = ''; $notification_str .= "Archived and overwrote $destdir/$sig_file with uploaded version\n" if ($t1); $notification_str .= "Archived and overwrote $destdir/$upload_file with uploaded version\n" if ($t2); &mail ($notification_str) if ($notification_str ne ''); - + # Do we need a subdirectory on $desttmp as well? Can't quite picture # when we'd have a collision, so skip that for now. - # + # for my $f (($sig_file, $upload_file)) { my @mv_args = ("/bin/mv", $f, "$desttmp/$f"); &fatal("@mv_args failed",0) if system (@mv_args) != 0; @@ -935,7 +973,7 @@ sub install_files { # Do atomic rename (if the system crashes between or during the mv's, # too bad :). This is so we don't ever have a partial file that could # be found by mirrors, etc. - # + # for my $f (($sig_file, $upload_file)) { chmod 0644, "$desttmp/$f"; rename ("$desttmp/$f", "$destdir/$f") @@ -946,12 +984,12 @@ sub install_files { # Report success and unlink the directive file. -# +# sub success_upload { my ($sig_file,$upload_file,$directive_file) = @_; - + &mail ("upload of $upload_file and $sig_file complete",1); - + unlink ($directive_file) || ftp_warn("unlink($directive_file) failed: $!"); } @@ -996,11 +1034,11 @@ sub cleanup { # address specified for the package. Rename the bad files with a # leading . so we don't try to process them again. Finally, write the # same MSG to stderr and exit badly. -# +# # It's ok that we quit here without processing every file, because we'll # be invoked again from cron in a few minutes and will look further then. # The bad . 
files will eventually get cleaned up via a separate script. -# +# sub fatal { my ($tainted_msg) = shift; my ($send_to_user) = shift; @@ -1022,7 +1060,7 @@ sub fatal { $tainted_msg =~ s=[^-.:,/@\w\s]==g; $tainted_msg =~ m=^([-.:,/@\w\s]+)$=; my $msg = $1; - + &mail ($msg,$send_to_user); my $pid = open(PWD, "-|"); @@ -1040,7 +1078,7 @@ sub fatal { } # Used for both success and failure. -# +# sub mail { my ($msg) = shift; my ($send_to_user) = shift; @@ -1085,7 +1123,7 @@ sub mail { $smtp->mail ($sender); $smtp->bcc ($email_always) if ($send_to_user); $smtp->recipient (@email_list, { SkipBad => 1}); - + $smtp->data (); $smtp->datasend ("To: " . join (", ", @email_list) . "\n"); $smtp->datasend ("From: $sender\n"); @@ -1103,12 +1141,12 @@ sub mail { } $smtp->datasend ("\n\n"); ftp_syslog('info', "($log_style) Body: $msg"); - + # Wrap message at 78 characters, this is e-mail... $Text::Wrap::columns=78; $smtp->datasend (wrap('','',$msg) . "\n"); $smtp->dataend (); - + $smtp->quit (); } } @@ -1123,7 +1161,7 @@ sub debug { ftp_die("FATAL: SMTP connection failed") unless $smtp; $smtp->mail ("ftp-upload-script\@gnu.org"); $smtp->recipient ($maintainer_email, { SkipBad => 1}); - + $smtp->data (); $smtp->datasend ("To: $maintainer_email\n"); $smtp->datasend ("From: ftp-upload-script\@gnu.org\n"); @@ -1151,7 +1189,7 @@ sub ftp_die($$) { sub ftp_syslog { my ($priority,$message) = @_; - # The syslog function is pretty picky, and (sometimes) dies silently + # The syslog function is pretty picky, and (sometimes) dies silently # when using non-valid syslog priorities. # That's why we run it inside an eval, and print out any errors to STDERR. eval { @@ -1168,4 +1206,3 @@ sub ftp_syslog { # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-end: "$" # End: - -- 2.25.1