#!/usr/bin/perl -w -T # <@LICENSE> # Copyright 2004 Apache Software Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # my $VERSION = 'svn' . (split(/\s+/, '$Id$'))[2]; my $PREFIX = '@@PREFIX@@'; # substituted at 'make' time my $DEF_RULES_DIR = '@@DEF_RULES_DIR@@'; # substituted at 'make' time my $LOCAL_RULES_DIR = '@@LOCAL_RULES_DIR@@'; # substituted at 'make' time my $LOCAL_STATE_DIR = '@@LOCAL_STATE_DIR@@'; # substituted at 'make' time use lib '@@INSTALLSITELIB@@'; # substituted at 'make' time # Standard perl modules use File::Spec; use File::Path; use File::Copy; use Getopt::Long; use Pod::Usage; use strict; use warnings; BEGIN { # Locate locally installed SA libraries *without* using FindBin, which generates # warnings and causes more trouble than its worth. We don't need to be too # smart about this BTW. my @bin = File::Spec->splitpath($0); my $bin = ($bin[0] ? File::Spec->catpath(@bin[0..1]) : $bin[1]) # /home/jm/foo -> /home/jm || File::Spec->curdir; # foo -> . # check to make sure it wasn't just installed in the normal way. # note that ./lib/Mail/SpamAssassin.pm takes precedence, for # building SpamAssassin on a machine where an old version is installed. if (-e $bin.'/lib/Mail/SpamAssassin.pm' || !-e '@@INSTALLSITELIB@@/Mail/SpamAssassin.pm') { # Firstly, are we running "make test" in the "t" dir? the test files # *need* to use 'blib', so that 'use bytes' is removed for pre-5.6 perls # beforehand by the preproc. However, ./spamassassin does not, as the # preproc will have stripped out the "use rule files from cwd" code from # Mail::SpamAssassin. So we want to use blib just for the t scripts. if ( $bin eq '../' && -e '../blib/lib/Mail/SpamAssassin.pm' ) { unshift(@INC, '../blib/lib'); } else { # These are common paths where the SA libs might be found. foreach (qw(lib ../lib/site_perl ../lib/spamassassin ../share/spamassassin/lib)) { my $dir = File::Spec->catdir($bin, split('/', $_)); if(-f File::Spec->catfile($dir, "Mail", "SpamAssassin.pm")) { unshift(@INC, $dir); last; } } } } } # These are the non-standard required modules # Use the evals to avoid the annoying RPM requirement check eval { use Net::DNS; }; eval { use LWP::UserAgent; }; eval { use HTTP::Date qw(time2str); }; eval { use Archive::Tar; }; eval { use IO::Zlib; }; # These should already be available use Mail::SpamAssassin; use Digest::SHA1 qw/sha1_hex/; # Make the main dbg() accessible in our package w/o an extra function *dbg=\&Mail::SpamAssassin::dbg; sub dbg; # Clean up PATH appropriately Mail::SpamAssassin::Util::clean_path_in_taint_mode(); # Default list of GPG keys allowed to sign update releases # # pub 1024D/265FA05B 2003-06-09 # Key fingerprint = 26C9 00A4 6DD4 0CD5 AD24 F6D7 DEE0 1987 265F A05B # uid SpamAssassin Signing Key # sub 1024D/FC51569B 2003-08-21 # # pub 4096R/5244EC45 2005-12-20 # Key fingerprint = 5E54 1DC9 59CB 8BAC 7C78 DFDC 4056 A61A 5244 EC45 # uid updates.spamassassin.org Signing Key # sub 4096R/24F434CE 2005-12-20 # # note for gpg newbs: these are "long" gpg keyids. It's common to also # use the last 8 hex digits as a shorter keyid string. # my %valid_GPG = ( '26C900A46DD40CD5AD24F6D7DEE01987265FA05B' => 1, '0C2B1D7175B852C64B3CDC716C55397824F434CE' => 1, '5E541DC959CB8BAC7C78DFDC4056A61A5244EC45' => 1, ); # Default list of channels to update against # my @channels = ( 'updates.spamassassin.org' ); my %opt = (); @{$opt{'gpgkey'}} = (); @{$opt{'channel'}} = (); my $GPG_ENABLED = 1; $opt{'gpghomedir'} = File::Spec->catfile($LOCAL_RULES_DIR, 'sa-update-keys'); Getopt::Long::Configure( qw(bundling no_getopt_compat no_auto_abbrev no_ignore_case)); GetOptions( 'debug|D:s' => \$opt{'debug'}, 'version|V' => \$opt{'version'}, 'help|h|?' => \$opt{'help'}, # allow multiple of these on the commandline 'gpgkey=s' => $opt{'gpgkey'}, 'gpghomedir=s' => \$opt{'gpghomedir'}, 'channel=s' => $opt{'channel'}, 'import=s' => \$opt{'import'}, 'gpgkeyfile=s' => \$opt{'gpgkeyfile'}, 'channelfile=s' => \$opt{'channelfile'}, 'updatedir=s' => \$opt{'updatedir'}, 'gpg!' => \$GPG_ENABLED, # backward compatibility 'usegpg' => \$GPG_ENABLED, ) or print_usage_and_exit(); if ( defined $opt{'help'} ) { print_usage_and_exit("For more information read the sa-update man page.\n", 0); } if ( defined $opt{'version'} ) { print_version(); exit(0); } # Figure out what version of SpamAssassin we're using, and also figure out the # reverse of it for the DNS query. Handle x.yyyzzz as well as x.yz. my $SAVersion = $Mail::SpamAssassin::VERSION; if ($SAVersion =~ /^(\d+)\.(\d{3})(\d{3})$/) { $SAVersion = join(".", $1+0, $2+0, $3+0); } elsif ($SAVersion =~ /^(\d)\.(\d)(\d)$/) { $SAVersion = "$1.$2.$3"; } else { die "fatal: SpamAssassin version number '$SAVersion' is in an unknown format!\n"; } my $RevSAVersion = join(".", reverse split(/\./, $SAVersion)); # set debug areas, if any specified (only useful for command-line tools) $SAVersion =~ /^(\d+\.\d+)/; if ($1+0 > 3.0) { $opt{'debug'} ||= 'all' if (defined $opt{'debug'}); } else { $opt{'debug'} = defined $opt{'debug'}; } # Find the default site rule directory, also setup debugging and other M::SA bits my $SA = new Mail::SpamAssassin({ debug => $opt{'debug'}, local_tests_only => 1, dont_copy_prefs => 1, PREFIX => $PREFIX, DEF_RULES_DIR => $DEF_RULES_DIR, LOCAL_RULES_DIR => $LOCAL_RULES_DIR, LOCAL_STATE_DIR => $LOCAL_STATE_DIR, }); if (defined $opt{'updatedir'}) { $opt{'updatedir'} = Mail::SpamAssassin::Util::untaint_file_path($opt{'updatedir'}); } else { $opt{'updatedir'} = $SA->sed_path('__local_state_dir__/spamassassin/__version__'); } dbg("generic: sa-update version $VERSION"); dbg("generic: using update directory: $opt{'updatedir'}"); # doesn't really display useful things for this script, but we do want # a module/version listing, etc. sa-update may be used for older versions # of SA that don't include this function, so eval around it. eval { $SA->debug_diagnostics(); 1; }; $SA->finish(); # untaint the command-line args; since the root user supplied these, and # we're not a setuid script, we trust them foreach my $optkey (keys %opt) { next if ref $opt{$optkey}; my $untaint = $opt{$optkey}; next unless defined $untaint; $untaint =~ /^(.*)$/; $opt{$optkey} = $1; } my $GPGPath; # deal with gpg-related options if (@{$opt{'gpgkey'}}) { $GPG_ENABLED = 1; foreach my $key (@{$opt{'gpgkey'}}) { unless (is_valid_gpg_key_id($key)) { dbg("gpg: invalid gpgkey parameter $key"); next; } $key = uc $key; dbg("gpg: adding key id $key"); $valid_GPG{$key} = 1; } } if (defined $opt{'gpgkeyfile'}) { $GPG_ENABLED = 1; unless (open(GPG, $opt{'gpgkeyfile'})) { die "Can't open ".$opt{'gpgkeyfile'}." for reading: $!\n"; } dbg("gpg: reading in gpgfile ".$opt{'gpgkeyfile'}); while(my $key = ) { unless (is_valid_gpg_key_id($key)) { dbg("gpg: invalid key id $key"); next; } $key = uc $key; dbg("gpg: adding key id $key"); $valid_GPG{$key} = 1; } close(GPG); } if ( $opt{'import'} ) { my $ex = import_gpg_key($opt{'import'}); exit $ex; } # does the sa-update keyring exist? if not, import it if ($GPG_ENABLED) { if(!-f File::Spec->catfile($opt{'gpghomedir'}, "secring.gpg")) { import_default_keyring(); # attempt to continue even if this fails, anyway } } # convert fingerprint gpg ids to keyids foreach (keys %valid_GPG) { my $id = substr $_, -8; $valid_GPG{$id} = 1; } # Deal with channel-related options if (defined $opt{'channel'} && scalar @{$opt{'channel'}} > 0) { @channels = @{$opt{'channel'}}; } if (defined $opt{'channelfile'}) { unless (open(CHAN, $opt{'channelfile'})) { die "Can't open ".$opt{'channelfile'}." for reading: $!\n"; } dbg("channel: reading in channelfile ".$opt{'channelfile'}); @channels = (); while(my $chan = ) { chomp $chan; $chan = lc $chan; dbg("channel: adding $chan"); push(@channels, $chan); } close(CHAN); } # untaint the channel listing for(my $ind = 0; $ind < @channels; $ind++) { if ($channels[$ind] =~ /^([a-zA-Z0-9._-]+)$/) { $channels[$ind] = $1; } else { splice @channels, $ind, 1; } } # find GPG in the PATH if ($GPG_ENABLED) { $GPGPath = find_gpg_path(); dbg("gpg: release trusted key id list: ".join(" ", keys %valid_GPG)); } my $res = Net::DNS::Resolver->new(); my $ua = LWP::UserAgent->new(); $ua->agent("sa-update/$VERSION"); $ua->timeout(60); # a good long timeout; 10 is too short for Coral! $ua->env_proxy; # Generate a temporary file to put channel content in for later use ... my ($content_file, $tfh) = Mail::SpamAssassin::Util::secure_tmpfile(); close($tfh); # and another, for the new config file my ($newcf_file, $tfh2) = Mail::SpamAssassin::Util::secure_tmpfile(); close($tfh2); # by default, exit code is 1, to indicate no updates occurred my $exit = 1; # Go ahead and loop through all of the channels foreach my $channel (@channels) { dbg("channel: attempting channel $channel"); # Convert the channel to a nice-for-filesystem version my $nicechannel = $channel; $nicechannel =~ tr/A-Za-z0-9-/_/cs; my $UPDDir = "$opt{'updatedir'}/$nicechannel"; my $UPDTmp = "$opt{'updatedir'}/$nicechannel.tmp"; my $CFFile = "$UPDDir.cf"; my $CFFTmp = $newcf_file; dbg("channel: update directory $UPDDir"); dbg("channel: update tmp directory $UPDTmp"); dbg("channel: channel cf file $CFFile"); dbg("channel: channel tmp cf file $CFFTmp"); # try to read metadata from channel.cf file my $currentV = -1; if (open(CF, $CFFile)) { while() { last unless /^# UPDATE\s+([A-Za-z]+)\s+(\S+)/; my($type, $value) = (lc $1,$2); dbg("channel: metadata $type = $value"); if ($type eq 'version') { $value =~ /^(\d+)/; $currentV = $1; } } close(CF); } # Setup the channel version DNS query my $DNSQ = "$RevSAVersion.$channel"; my $newV; my $dnsV = do_txt_query($DNSQ); if (defined $dnsV && $dnsV =~ /^(\d+)/) { $newV = $1 if (!defined $newV || $1 > $newV); dbg("dns: $DNSQ => $dnsV, parsed as $1"); } # Not getting a response isn't a failure, there may just not be any updates # for this SA version yet. unless (defined $newV) { dbg("channel: no updates available, skipping channel"); next; } # If this channel hasn't been installed before, or it's out of date, # keep going. Otherwise, skip it. if ($currentV >= $newV) { dbg("channel: current version is $currentV, new version is $newV, skipping channel"); next; } # ensure dirs exist, upfront unless (-d $UPDDir) { dbg("channel: creating $UPDDir"); mkpath([$UPDDir], 0, 0777) or die "fatal: can't create $UPDDir: $!\n"; } unless (-d $UPDTmp) { dbg("channel: creating $UPDTmp"); mkpath([$UPDTmp], 0, 0777) or die "fatal: can't create $UPDTmp: $!\n"; } # copy the MIRRORED.BY file to the tmpdir, if it exists if (-f "$UPDDir/MIRRORED.BY") { unlink("$UPDTmp/MIRRORED.BY"); my ($x, $atime, $mtime); ($x,$x,$x,$x,$x,$x,$x,$x,$atime,$mtime,$x) = stat "$UPDDir/MIRRORED.BY"; copy("$UPDDir/MIRRORED.BY", "$UPDTmp/MIRRORED.BY") or die "fatal: cannot copy $UPDDir/MIRRORED.BY to $UPDTmp/MIRRORED.BY"; # ensure modtimes match utime($atime, $mtime, "$UPDTmp/MIRRORED.BY"); } else { # We don't currently have the list of mirrors, so go grab it. dbg("channel: no MIRRORED.BY file available"); my $mirror = do_txt_query("mirrors.$channel"); unless ($mirror) { warn "error: no mirror data available for channel $channel\n"; channel_failed("channel: MIRRORED.BY file location was not in DNS"); } $mirror = http_get($mirror); unless ($mirror) { warn "error: no mirror data available for channel $channel\n"; channel_failed("channel: MIRRORED.BY contents were missing"); next; } unless (open(MIR, ">$UPDTmp/MIRRORED.BY")) { warn "error: can't create mirrors file: $!\n"; channel_failed("channel: MIRRORED.BY creation failure"); next; } print MIR $mirror; close(MIR); dbg("channel: MIRRORED.BY file retrieved"); } # Read in the list of mirrors unless (open(MIR, "$UPDTmp/MIRRORED.BY")) { warn "error: can't read mirrors file: $!\n"; channel_failed("channel: MIRRORED.BY file is unreadable"); next; } dbg("channel: reading MIRRORED.BY file"); my %mirrors = (); while(my $mirror = ) { next if ($mirror =~ /^#/); # explicitly skip comments # We only support HTTP right now if ($mirror !~ m@^http://@i) { dbg("channel: skipping non-HTTP mirror: $mirror"); next; } chomp $mirror; my @data; dbg("channel: found mirror $mirror"); ($mirror,@data) = split(/\s+/, $mirror); $mirror =~ s@/+$@@; # http://example.com/updates/ -> .../updates $mirrors{$mirror}->{weight} = 1; foreach (@data) { my($k,$v) = split(/=/, $_, 2); $mirrors{$mirror}->{$k} = $v; } } close(MIR); unless (keys %mirrors) { warn "error: no mirrors available for channel $channel\n"; channel_failed("channel: no mirrors available"); next; } # remember the mtime of the file so we can IMS GET later on my $mirby_time = (stat("$UPDTmp/MIRRORED.BY"))[9]; # Now that we've laid the foundation, go grab the appropriate files # my $content; my $SHA1; my $GPG; my $mirby; # Loop through all available mirrors, choose from them randomly # if the archive get fails, choose another mirror, # if the get for the sha1 or gpg signature files, the channel fails while (my $mirror = choose_mirror(\%mirrors)) { # Grab the data hash for this mirror, then remove it from the list my $mirror_info = $mirrors{$mirror}; delete $mirrors{$mirror}; dbg("channel: selected mirror $mirror"); # Actual archive file $content = http_get("$mirror/$newV.tar.gz"); next unless $content; # SHA1 of the archive file $SHA1 = http_get("$mirror/$newV.tar.gz.sha1"); last unless $SHA1; # if GPG is enabled, the GPG detached signature of the archive file if ($GPG_ENABLED) { $GPG = http_get("$mirror/$newV.tar.gz.asc"); last unless $GPG; } # try to update our list of mirrors. # a failure here doesn't cause channel failure. $mirby = http_get("$mirror/MIRRORED.BY", $mirby_time); last; } unless ($content && $SHA1 && (!$GPG_ENABLED || $GPG)) { warn "error: channel $channel has no working mirrors\n"; channel_failed("channel: could not find working mirror"); next; } # Validate the SHA1 signature before going forward with more complicated # operations. # The SHA1 file may be "signature filename" ala sha1sum, just use the signature $SHA1 =~ /^([a-fA-F0-9]{40})/; $SHA1 = $1 || 'INVALID'; my $digest = sha1_hex($content); dbg("sha1: verification expected: $SHA1"); dbg("sha1: verification got : $digest"); unless ($digest eq $SHA1) { warn "error: can't verify SHA1 signature\n"; channel_failed("channel: SHA1 verification failed"); next; } # Write the content out to a temp file for GPG/Archive::Tar interaction dbg("channel: populating temp content file"); open(TMP, ">$content_file") || die "fatal: can't write to content temp file $content_file: $!\n"; binmode TMP; print TMP $content; close(TMP); # to sign : gpg -bas file # to verify: gpg --verify --batch --no-tty --status-fd=1 -q --logger-fd=1 file.asc file # look for : [GNUPG:] GOODSIG 6C55397824F434CE updates.spamassassin.org [...] # [GNUPG:] VALIDSIG 0C2B1D7175B852C64B3CDC716C55397824F434CE [...] # [GNUPG:] NO_PUBKEY 6C55397824F434CE if ($GPG) { dbg("gpg: populating temp signature file"); my $sig_file; ($sig_file, $tfh) = Mail::SpamAssassin::Util::secure_tmpfile(); binmode $tfh; print $tfh $GPG; close($tfh); dbg("gpg: calling gpg"); my $gpghome = interpolate_gpghomedir(); # TODO: we could also use "--keyserver pgp.mit.edu" or similar, # to autodownload missing keys... my $CMD = "$GPGPath $gpghome --verify --batch ". "--no-tty --status-fd=1 -q --logger-fd=1"; unless (open(CMD, "$CMD $sig_file $content_file|")) { unlink $sig_file || warn "error: can't unlink $sig_file: $!\n"; die "fatal: couldn't execute $GPGPath: $!\n"; } # Determine the fate of the signature my $signer = ''; my $missingkeys = ''; while(my $GNUPG = ) { chop $GNUPG; dbg ("gpg: $GNUPG"); if ($GNUPG =~ /^gpg: fatal:/) { warn $GNUPG."\n"; # report bad news } if ($GNUPG =~ /^\Q[GNUPG:]\E NO_PUBKEY \S+(\S{8})$/) { $missingkeys .= $1." "; } next unless ($GNUPG =~ /^\Q[GNUPG:]\E (?:VALID|GOOD)SIG (\S{8,40})/); my $key = $1; # we want either a keyid (8) or a fingerprint (40) if (length $key > 8 && length $key < 40) { substr($key, 8) = ''; } # use the longest match we can find $signer = $key if (length $key > length $signer); } close(CMD); unlink $sig_file || warn "Can't unlink $sig_file: $!\n"; if ($signer) { my $keyid = substr $signer, -8; dbg("gpg: found signature made by key $signer"); if (exists $valid_GPG{$signer}) { dbg("gpg: key id $signer is release trusted"); } elsif (exists $valid_GPG{$keyid}) { dbg("gpg: key id $keyid is release trusted"); } else { dbg("gpg: key id $keyid is not release trusted"); $signer = undef; } } unless ($signer) { warn "error: GPG validation failed!\n"; if ($missingkeys) { warn <$UPDTmp/MIRRORED.BY")) { print MBY $mirby; close(MBY); } else { warn "error: can't write new MIRRORED.BY file: $!\n"; } } dbg("channel: cleaning out update directory"); if (!clean_update_dir($UPDTmp)) { channel_failed("channel: attempt to clean update dir failed"); next; } unlink $CFFTmp || warn "error: can't remove file $CFFTmp: $!\n"; $tfh = IO::Zlib->new($content_file, "rb"); die "fatal: couldn't read content tmpfile $content_file: $!\n" unless $tfh; my $tar = Archive::Tar->new($tfh); die "fatal: couldn't create Archive::Tar object!\n" unless $tar; dbg("channel: extracting archive"); my $ret = taint_safe_archive_extract($UPDTmp, $tar); unless ($ret) { close($tfh); warn "error: couldn't extract the tar archive!\n"; channel_failed("channel: archive extraction failed"); next; } close($tfh); # check --lint if (!lint_check_dir($UPDTmp)) { warn "error: lint check of update failed! channel failed\n"; channel_failed("channel: lint check of update failed"); next; } # OK, lint passed. now create the update config file dbg("channel: creating update config file"); unless (open(CF, ">$CFFTmp")) { die "fatal: can't create new channel cf $CFFTmp: $!\n"; } # Put in whatever metadata we need print CF "# UPDATE version $newV\n"; # try to figure out the relative path dir name my $relativeDir = $UPDDir; $UPDDir =~ m,/([^/]+)/*$,; if ($1) { $relativeDir = $1; } dbg("channel: updatedir=$UPDDir relativepath=$relativeDir"); my @files = (); # now include *.cf unless (opendir(DIR, $UPDTmp)) { die "fatal: can't access $UPDTmp: $!\n"; } while(my $file = readdir(DIR)) { $file =~ /^([^\/]+)$/; # untaint $file = $1; next unless (-f "$UPDTmp/$file"); next if ($file eq "MIRRORED.BY"); # handled separately dbg("channel: adding $file"); if ($file =~ /\.cf$/) { print CF "include $relativeDir/$file\n"; } push (@files, $file); } closedir(DIR); if (!close(CF)) { warn "write to $CFFTmp failed! attempting to continue"; channel_failed("write to $CFFTmp failed"); next; } dbg("channel: applying changes to $UPDDir..."); # too late to stop now! At this stage, if there are errors, # we have to attempt to carry on regardless, since we've already # blown away the old ruleset. # clean out the "real" update dir, and copy from tmp areas if (!clean_update_dir($UPDDir)) { warn("channel: attempt to rm contents failed, attempting to continue anyway"); } foreach my $file (@files) { rename("$UPDTmp/$file", "$UPDDir/$file") or warn "rename $UPDTmp/$file $UPDDir/$file failed: $!"; } unlink $CFFile || warn "error: can't remove file $CFFile: $!\n"; cross_fs_rename($CFFTmp, $CFFile) or warn "rename $CFFTmp $CFFile failed: $!"; unlink("$UPDDir/MIRRORED.BY"); rename("$UPDTmp/MIRRORED.BY", "$UPDDir/MIRRORED.BY") or warn "error: couldn't mv $UPDTmp/MIRRORED.BY $UPDDir/MIRRORED.BY: $!\n"; rmdir $UPDTmp; $exit = 0; # "exit 0" means an update occurred dbg("channel: update complete"); } # clear out the temp files if they still exist foreach ( $newcf_file, $content_file ) { if (-e $_) { unlink $_ || warn "error: can't remove file $_: $!\n"; } } dbg("diag: updates complete, exiting with code $exit"); exit $exit; sub channel_failed { my $reason = shift; warn("$reason, channel failed\n"); if ($exit > 4) { $exit++; } else { $exit = 4; # 4 or higher means channel failed } } sub taint_safe_archive_extract { my $todir = shift; my $tar = shift; # stupid Archive::Tar is not natively taint-safe! duh. # return $tar->extract(); # instead, get the file list, untaint, and extract one-by-one. my @files = $tar->list_files(); foreach my $file (@files) { next if ($file =~ /^\/$/); # ignore dirs $file =~ /^([-\.\,\/a-zA-Z0-9_]+)$/; my $outfname = $1; $outfname =~ s/\.\.\//__\//gs; # avoid "../" dir traversal attacks $outfname = "$todir/$outfname"; dbg "extracting: $outfname"; if (open OUT, ">".$outfname) { print OUT $tar->get_content($file); if (!close OUT) { warn "error: write failed to $outfname: $!"; goto failed; } } else { warn "error: failed to open $outfname for write: $!"; goto failed; } } return @files; failed: return; # undef = failure } # Do a generic TXT query sub do_txt_query { my($query) = shift; my $RR = $res->query($query, 'TXT'); my $result = ''; if ($RR) { foreach my $rr ($RR->answer) { my $text = $rr->rdatastr; $text =~ /^"(.*)"$/; if (length $result) { $result .= " $1"; } else { $result = $1; } } } else { dbg("dns: query failed: $query => " . $res->errorstring); } return $result; } # Do a GET request via HTTP for a certain URL # Use the optional time_t value to do an IMS GET sub http_get { my($url, $ims) = @_; my $response; my $text; # retry 3 times; this works better with Coral foreach my $retries (1 .. 3) { my $request = HTTP::Request->new("GET"); $request->url($url); if (defined $ims) { my $str = time2str($ims); $request->header('If-Modified-Since', $str); dbg("http: IMS GET request, $url, $str"); } else { dbg("http: GET request, $url"); } $response = $ua->request($request); if ($response->is_success) { return $response->content; } # could be a "304 not modified" or similar. # TODO: should use a special return type for "not modified" here # instead of overloading the failure return type if ($ims && $response->status_line =~ /^3/) { return; } # include the text in the debug output; it's useful in some cases, # e.g. proxies that require authentication, diagnosing fascist # filtering false positives, etc. $text = $response->content; $text ||= "(no body)"; $text =~ s/\s+/ /gs; dbg ("http: request failed, retrying: " . $response->status_line.": ".$text); } # this should be a user-readable warning without --debug warn "http: request failed: " . $response->status_line.": ".$text."\n"; return; } # choose a random integer between 0 and the total weight of all mirrors # loop through the mirrors from largest to smallest weight # if random number is < largest weight, use it # otherwise, random number -= largest, remove mirror from list, try again # eventually, there'll just be 1 mirror left in $mirrors[0] and it'll be used # sub choose_mirror { my($mirror_list) = @_; # Sort the mirror list by reverse weight (largest first) my @mirrors = sort { $mirror_list->{$b}->{weight} <=> $mirror_list->{$a}->{weight} } keys %{$mirror_list}; return unless @mirrors; if (keys %{$mirror_list} > 1) { # Figure out the total weight my $weight_total = 0; foreach (@mirrors) { $weight_total += $mirror_list->{$_}->{weight}; } # Pick a random int my $value = int(rand($weight_total)); # loop until we find the right mirror, or there's only 1 left while (@mirrors > 1) { if ($value < $mirror_list->{$mirrors[0]}->{weight}) { last; } $value -= $mirror_list->{$mirrors[0]}->{weight}; shift @mirrors; } } return $mirrors[0]; } sub print_version { print "sa-update version $VERSION\n" . " running on Perl version " . join(".", map { $_||=0; $_*1 } ($] =~ /(\d)\.(\d{3})(\d{3})?/ )) . "\n"; } sub print_usage_and_exit { my ( $message, $exitval ) = @_; $exitval ||= 64; if ($exitval == 0) { print_version(); print("\n"); } pod2usage( -verbose => 0, -message => $message, -exitval => $exitval, ); } sub usage { my ( $verbose, $message ) = @_; print "sa-update version $VERSION\n"; pod2usage( -verbose => $verbose, -message => $message, -exitval => 64 ); } sub find_gpg_path { dbg("gpg: Searching for 'gpg' in ".$ENV{'PATH'}); my $path; foreach my $dir (split(/:/, $ENV{'PATH'})) { $dir = File::Spec->catfile($dir, 'gpg'); if (-x $dir) { $path = $dir; last; } } die "fatal: couldn't find GPG in \$PATH\n" unless ($path); dbg("gpg: found $path"); return $path; } sub interpolate_gpghomedir { my $gpghome = ''; if ($opt{'gpghomedir'}) { $gpghome = $opt{'gpghomedir'}; $gpghome =~ s/\'/\\\'/gs; $gpghome = "--homedir='$gpghome'"; } return $gpghome; } sub import_gpg_key { my $keyfile = shift; $GPGPath = find_gpg_path(); my $gpghome = interpolate_gpghomedir(); my $CMD = "$GPGPath $gpghome --batch ". "--no-tty --status-fd=1 -q --logger-fd=1 --import"; unless (open(CMD, "$CMD $keyfile|")) { die "fatal: couldn't execute $GPGPath: $!\n"; } # Determine the fate of the signature while(my $GNUPG = ) { chop $GNUPG; dbg ("gpg: $GNUPG"); if ($GNUPG =~ /^gpg: /) { warn $GNUPG."\n"; # report bad news } if ($GNUPG =~ /^IMPORTED /) { print "sa-update --import: success. $GNUPG\n"; } } close(CMD); return ($? >> 8); } sub import_default_keyring { my $defkey = File::Spec->catfile ($DEF_RULES_DIR, "sa-update-pubkey.txt"); return unless (-f $defkey); print "sa-update: importing default keyring to '".$opt{gpghomedir}."'...\n"; unless (-d $opt{gpghomedir}) { # use 0700 to avoid "unsafe permissions" warning mkdir ($opt{gpghomedir}, 0700) or die "cannot mkdir $opt{gpghomedir}: $!"; } import_gpg_key($defkey); } sub is_valid_gpg_key_id { # either a keyid (8 bytes) or a fingerprint (40 bytes) return ($_[0] =~ /^[a-fA-F0-9]+$/ && (length $_[0] == 8 || length $_[0] == 40)); } sub clean_update_dir { my $dir = shift; unless (opendir(DIR, $dir)) { warn "error: can't readdir $dir: $!\n"; dbg("channel: attempt to readdir failed, channel failed"); return 0; } while(my $file = readdir(DIR)) { next unless (-f "$dir/$file"); next if ($file eq 'MIRRORED.BY'); dbg("channel: unlinking $file"); $file =~ /^([^\/]+)$/; # untaint $file = $1; if (!unlink "$dir/$file") { warn "error: can't remove file $dir/$file: $!\n"; return 0; } } closedir(DIR); return 1; } sub lint_check_dir { my $dir = shift; # due to the Logger module's globalness (all M::SA objects share the same # Logger setup), we can't change the debug level here to only include # "config" or otherwise be more terse. :( my $spamtest = new Mail::SpamAssassin( { rules_filename => $dir, site_rules_filename => "$dir/doesnotexist", userprefs_filename => "$dir/doesnotexist", local_tests_only => 1, dont_copy_prefs => 1, PREFIX => $PREFIX, DEF_RULES_DIR => $DEF_RULES_DIR, LOCAL_RULES_DIR => $LOCAL_RULES_DIR, LOCAL_STATE_DIR => $LOCAL_STATE_DIR, }); # need to kluge disabling bayes since it may try to expire the DB, and # without the proper config it's not going to be good. $spamtest->{conf}->{use_bayes} = 0; my $res = $spamtest->lint_rules(); $spamtest->finish(); return $res == 0; } # a version of rename() that can cope with renaming files across filesystems, # as mv(1) can. sub cross_fs_rename { my ($from, $to) = @_; my $ret = rename ($from, $to); if ($ret) { return $ret; # success first time! great } # try a copy if (!copy($from, $to)) { # copy failed, too. we have no further fallbacks; return the rename() # failure code return $ret; } # copy succeeded, we're good; remove the source, and return success unlink($from); return 1; } # --------------------------------------------------------------------------- =head1 NAME sa-update - automate SpamAssassin rule updates =head1 SYNOPSIS B [options] Options: --updatedir path Directory to place updates, defaults to the SpamAssassin site rules directory (def: /var/lib/spamassassin/) --channel channel Retrieve updates from this channel Use multiple times for multiple channels --channelfile file Retrieve updates from the channels in the file --gpgkey key Trust the key id to sign releases Use multiple times for multiple keys --gpgkeyfile file Trust the key ids in the file to sign releases --gpghomedir path Store the GPG keyring in this directory --gpg and --nogpg Use (or do not use) GPG to verify updates (--gpg is assumed by use of the above --gpgkey and --gpgkeyfile options) -D, --debug [area=n,...] Print debugging messages -V, --version Print version -h, --help Print usage message =head1 DESCRIPTION sa-update automates the process of downloading and installing new rules and configuration, based on channels. The default channel is I, which has updated rules since the previous release. Update archives are verified by default using SHA1 hashes and GPG signatures. =head1 OPTIONS =over 4 =item B<--updatedir> By default, C will use the system-wide rules update directory: @@LOCAL_STATE_DIR@@/spamassassin/@@VERSION@@ If the updates should be stored in another location, specify it here. =item B<--channel> sa-update can update multiple channels at the same time. By default, it will only access "updates.spamassassin.org", but more channels can be specified via this option. If there are multiple additional channels, use the option multiple times, once per channel. i.e.: sa-update --channel foo.example.com --channel bar.example.com =item B<--channelfile> Similar to the B<--channel> option, except specify the additional channels in a file instead of on the commandline. This is extremely useful when there are a lot of additional channels. =item B<--gpg>, B<--nogpg> sa-update by default will verify update archives by use of a SHA1 checksum and GPG signature. SHA1 hashes can verify whether or not the downloaded archive has been corrupted, but it does not offer any form of security regarding whether or not the downloaded archive is legitimate (aka: non-modifed by evildoers). GPG verification of the archive is used to solve that problem. If you wish to skip GPG verification, you can use the B<--nogpg> option to disable its use. Use of the following gpgkey-related options will override B<--nogpg> and keep GPG verification enabled. Note: Currently, only GPG itself is supported (ie: not PGP). v1.2 has been tested, although later versions ought to work as well. =item B<--gpgkey> sa-update has the concept of "release trusted" GPG keys. When an archive is downloaded and the signature verified, sa-update requires that the signature be from one of these "release trusted" keys or else verification fails. This prevents third parties from manipulating the files on a mirror, for instance, and signing with their own key. By default, sa-update trusts key id C<265FA05B>, which is the standard SpamAssassin release key. Use this option to add more trusted keys. For multiple keys, use the option multiple times. i.e.: sa-update --gpgkey E580B363 --gpgkey 298BC7D0 Note: use of this option automatically enables GPG verification. =item B<--gpgkeyfile> Similar to the B<--gpgkey> option, except specify the additional keys in a file instead of on the commandline. This is extremely useful when there are a lot of additional keys that you wish to trust. =item B<--gpghomedir> Specify a directory path to use as a storage area for the C GPG keyring. By default, this is @@LOCAL_RULES_DIR@@/sa-update-keys =item B<-D> [I], B<--debug> [I] Produce debugging output. If no areas are listed, all debugging information is printed. Diagnostic output can also be enabled for each area individually; I is the area of the code to instrument. For example, to produce diagnostic output on channel, gpg, and http, use: sa-update -D channel,gpg,http For more information about which areas (also known as channels) are available, please see the documentation at: C =item B<-h>, B<--help> Print help message and exit. =item B<-V>, B<--version> Print sa-update version and exit. =back =head1 EXIT CODES An exit code of C<0> means an update was available, and was downloaded and installed successfully. An exit code of C<1> means no fresh updates were available. An exit code of C<4> or higher, indicates that errors occurred while attempting to download and extract updates. =head1 SEE ALSO Mail::SpamAssassin(3) Mail::SpamAssassin::Conf(3) spamassassin(1) spamd(1) =head1 PREREQUESITES C =head1 BUGS See =head1 AUTHORS The Apache SpamAssassin(tm) Project =head1 COPYRIGHT SpamAssassin is distributed under the Apache License, Version 2.0, as described in the file C included with the distribution. =cut