X-Git-Url: https://vcs.fsf.org/?p=exim.git;a=blobdiff_plain;f=src%2Fsrc%2Feximstats.src;h=5e1a0847b58fe8424c630c03e5f298da87a89c8e;hp=223c7a7b245b306d1204ed484acb6891366974fa;hb=4c2efd7a1bc5b018f2e05a0d739fd856967e3de7;hpb=059ec3d9952740285fb1ebf47961b8aca2eb1b4a diff --git a/src/src/eximstats.src b/src/src/eximstats.src index 223c7a7b2..5e1a0847b 100644 --- a/src/src/eximstats.src +++ b/src/src/eximstats.src @@ -1,7 +1,6 @@ -#!PERL_COMMAND -w -# $Cambridge: exim/src/src/eximstats.src,v 1.1 2004/10/07 10:39:01 ph10 Exp $ +#!PERL_COMMAND -# Copyright (c) 2001 University of Cambridge. +# Copyright (c) 2001-2017 University of Cambridge. # See the file NOTICE for conditions of use and distribution. # Perl script to generate statistics from one or more Exim log files. @@ -74,7 +73,7 @@ # 2001-10-21 Removed -domain flag and added -bydomain, -byhost, and -byemail. # We now generate our main parsing subroutine as an eval statement # which improves performance dramatically when not all the results -# are required. We also cache the last timestamp to time convertion. +# are required. We also cache the last timestamp to time conversion. # # NOTE: 'Top 50 destinations by (message count|volume)' lines are # now 'Top N (host|email|domain) destinations by (message count|volume)' @@ -143,7 +142,7 @@ # in HTML output. Also added code to convert them back with -merge. # Fixed timestamp offsets to convert to seconds rather than minutes. # Updated -merge to work with output files using timezones. -# Added cacheing to speed up the calculation of timezone offsets. +# Added caching to speed up the calculation of timezone offsets. # # 2003-02-07 V1.25 Steve Campbell # Optimised the usage of mktime() in the seconds subroutine. @@ -163,7 +162,7 @@ # Bernard Massot. # # 2003-06-03 V1.28 John Newman -# Added in the ability to skip over the parsing and evaulation of +# Added in the ability to skip over the parsing and evaluation of # specific transports as passed to eximstats via the new "-nt/.../" # command line argument. This new switch allows the viewing of # not more accurate statistics but more applicable statistics when @@ -181,18 +180,159 @@ # 2004-02-20 V1.31 Andrea Balzi # Only show the Local Sender/Destination links if the tables exist. # - +# 2004-07-05 V1.32 Steve Campbell +# Fix '-merge -h0' divide by zero error. +# +# 2004-07-15 V1.33 Steve Campbell +# Documentation update - I've converted the subroutine +# documentation from POD to comments. +# +# 2004-12-10 V1.34 Steve Campbell +# Eximstats can now parse syslog lines as well as mainlog lines. +# +# 2004-12-20 V1.35 Wouter Verhelst +# Pie charts by volume were actually generated by count. Fixed. +# +# 2005-02-07 V1.36 Gregor Herrmann / Steve Campbell +# Added average sizes to HTML Top tables. +# +# 2005-04-26 V1.37 Frank Heydlauf +# Added -xls and the ability to specify output files. +# +# 2005-04-29 V1.38 Steve Campbell +# Use FileHandles for outputting results. +# Allow any combination of xls, txt, and html output. +# Fixed display of large numbers with -nvr option +# Fixed merging of reports with empty tables. +# +# 2005-05-27 V1.39 Steve Campbell +# Added the -include_original_destination flag +# Removed tabs and trailing whitespace. +# +# 2005-06-03 V1.40 Steve Campbell +# Whilst parsing the mainlog(s), store information about +# the messages in a hash of arrays rather than using +# individual hashes. This is a bit cleaner and results in +# dramatic memory savings, albeit at a slight CPU cost. 
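#   A rough sketch of that layout, for illustration only; the message id and
#   values are made up, and $SIZE and $FROM_HOST are the field-index
#   enumerations declared further down in this script:
#     $messages{'1BvYXW-0007t5-Ke'}[$SIZE]      = 4096;
#     $messages{'1BvYXW-0007t5-Ke'}[$FROM_HOST] = 'mail.example.com';
#   in place of the old per-message hashes such as %size and %from_host.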
+# +# 2005-06-15 V1.41 Steve Campbell +# Added the -show_rt flag. +# Added the -show_dt flag. +# +# 2005-06-24 V1.42 Steve Campbell +# Added Histograms for user specified patterns. +# +# 2005-06-30 V1.43 Steve Campbell +# Bug fix for V1.42 with -h0 specified. Spotted by Chris Lear. +# +# 2005-07-26 V1.44 Steve Campbell +# Use a glob alias rather than an array ref in the generated +# parser. This improves both readability and performance. +# +# 2005-09-30 V1.45 Marco Gaiarin / Steve Campbell +# Collect SpamAssassin and rejection statistics. +# Don't display local sender or destination tables unless +# there is data to show. +# Added average volumes into the top table text output. +# +# 2006-02-07 V1.46 Steve Campbell +# Collect data on the number of addresses (recipients) +# as well as the number of messages. +# +# 2006-05-05 V1.47 Steve Campbell +# Added 'Message too big' to the list of mail rejection +# reasons (thanks to Marco Gaiarin). +# +# 2006-06-05 V1.48 Steve Campbell +# Mainlog lines which have GMT offsets and are too short to +# have a flag are now skipped. +# +# 2006-11-10 V1.49 Alain Williams +# Added the -emptyok flag. +# +# 2006-11-16 V1.50 Steve Campbell +# Fixes for obtaining the IP address from reject messages. +# +# 2006-11-27 V1.51 Steve Campbell +# Another update for obtaining the IP address from reject messages. +# +# 2006-11-27 V1.52 Steve Campbell +# Tally any reject message containing SpamAssassin. +# +# 2007-01-31 V1.53 Philip Hazel +# Allow for [pid] after date in log lines +# +# 2007-02-14 V1.54 Daniel Tiefnig +# Improved the '($parent) =' pattern match. +# +# 2007-03-19 V1.55 Steve Campbell +# Differentiate between permanent and temporary rejects. +# +# 2007-03-29 V1.56 Jez Hancock +# Fixed some broken HTML links and added missing column headers. +# +# 2007-03-30 V1.57 Steve Campbell +# Fixed Grand Total Summary Domains, Edomains, and Email columns +# for Rejects, Temp Rejects, Ham, and Spam rows. +# +# 2007-04-11 V1.58 Steve Campbell +# Fix to get <> and blackhole to show in edomain tables. +# +# 2007-09-20 V1.59 Steve Campbell +# Added the -bylocaldomain option +# +# 2007-09-20 V1.60 Heiko Schlittermann +# Fix for misinterpreted log lines +# +# 2013-01-14 V1.61 Steve Campbell +# Watch out for senders sending "HELO [IpAddr]" +# +# +# For documentation on the logfile format, see +# http://www.exim.org/exim-html-4.50/doc/html/spec_48.html#IX2793 =head1 NAME -eximstats - generates statistics from Exim mainlog files. +eximstats - generates statistics from Exim mainlog or syslog files. =head1 SYNOPSIS - eximstats [Options] mainlog1 mainlog2 ... > report.txt + eximstats [Output] [Options] mainlog1 mainlog2 ... eximstats -merge [Options] report.1.txt report.2.txt ... > weekly_report.txt -Options: +=head2 Output: + +=over 4 + +=item B<-txt> + +Output the results in plain text to STDOUT. + +=item B<-txt>=I + +Output the results in plain text. Filename '-' for STDOUT is accepted. + +=item B<-html> + +Output the results in HTML to STDOUT. + +=item B<-html>=I + +Output the results in HTML. Filename '-' for STDOUT is accepted. + +=item B<-xls> + +Output the results in Excel compatible Format to STDOUT. +Requires the Spreadsheet::WriteExcel CPAN module. + +=item B<-xls>=I + +Output the results in Excel compatible format. Filename '-' for STDOUT is accepted. + + +=back + +=head2 Options: =over 4 @@ -240,6 +380,36 @@ Omit local sources/destinations in top listing. Include remote users in the top source/destination listings. 
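
For example, a hypothetical invocation combining this switch with the
output selectors described under Output above:

    eximstats -html=report.html -t_remote_users mainlog.1 mainlog.2
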
+=item B<-include_original_destination>
+
+Include the original destination email addresses rather than just
+using the final ones.
+Useful for finding out which of your mailing lists are receiving mail.
+
+=item B<-show_dt>I<times>
+
+Show the delivery times (B
)for all the messages. + +Exim must have been configured to use the +deliver_time logging option +for this option to work. + +I is an optional list of times. Eg -show_dt1,2,4,8 will show +the number of messages with delivery times under 1 second, 2 seconds, 4 seconds, +8 seconds, and over 8 seconds. + +=item B<-show_rt>I + +Show the receipt times for all the messages. The receipt time is +defined as the Completed hh:mm:ss - queue_time_overall - the Receipt hh:mm:ss. +These figures will be skewed by pipelined messages so might not be that useful. + +Exim must have been configured to use the +queue_time_overall logging option +for this option to work. + +I is an optional list of times. Eg -show_rt1,2,4,8 will show +the number of messages with receipt times under 1 second, 2 seconds, 4 seconds, +8 seconds, and over 8 seconds. + =item B<-byhost> Show results by sending host. This may be combined with @@ -306,13 +476,10 @@ round to the same value. =back -=item B<-html> - -Output the results in HTML. - =item B<-charts> Create graphical charts to be displayed in HTML output. +Only valid in combination with I<-html>. This requires the following modules which can be obtained from http://www.cpan.org/modules/01modules.index.html @@ -343,6 +510,11 @@ Create the charts in the directory Specify the relative directory for the "img src=" tags from where to include the charts +=item B<-emptyok> + +Specify that it's OK to not find any valid log lines. Without this +we will output an error message if we don't find any. + =item B<-d> Debug flag. This outputs the eval()'d parser onto STDOUT which makes it @@ -353,15 +525,15 @@ title! =head1 DESCRIPTION -Eximstats parses exim mainlog files and outputs a statistical +Eximstats parses exim mainlog and syslog files to output a statistical analysis of the messages processed. By default, a text -analysis is generated, but you can request an html analysis -by using the B<-html> flag. See the help (B<-help>) to learn +analysis is generated, but you can request other output formats +using flags. See the help (B<-help>) to learn about how to create charts from the tables. =head1 AUTHOR -There is a web site at http://www.exim.org - this contains details of the +There is a website at https://www.exim.org - this contains details of the mailing list exim-users@exim.org. =head1 TO DO @@ -369,26 +541,36 @@ mailing list exim-users@exim.org. This program does not perfectly handle messages whose received and delivered log lines are in different files, which can happen when you have multiple mail servers and a message cannot be -immeadiately delivered. Fixing this could be tricky... +immediately delivered. Fixing this could be tricky... -=head1 SUBROUTINES - -The following section will only be of interest to the -program maintainers: +Merging of xls files is not (yet) possible. Be free to implement :) =cut +use warnings; use integer; +BEGIN { pop @INC if $INC[-1] eq '.' }; use strict; +use IO::File; +use File::Basename; # use Time::Local; # PH/FANF use POSIX; -use vars qw($HAVE_GD_Graph_pie $HAVE_GD_Graph_linespoints); +if (@ARGV and $ARGV[0] eq '--version') { + print basename($0) . ": $0\n", + "build: EXIM_RELEASE_VERSIONEXIM_VARIANT_VERSION\n", + "perl(runtime): $]\n"; + exit 0; +} + +use vars qw($HAVE_GD_Graph_pie $HAVE_GD_Graph_linespoints $HAVE_Spreadsheet_WriteExcel); eval { require GD::Graph::pie; }; $HAVE_GD_Graph_pie = $@ ? 0 : 1; eval { require GD::Graph::linespoints; }; $HAVE_GD_Graph_linespoints = $@ ? 
0 : 1; +eval { require Spreadsheet::WriteExcel; }; +$HAVE_Spreadsheet_WriteExcel = $@ ? 0 : 1; ################################################## @@ -398,6 +580,7 @@ $HAVE_GD_Graph_linespoints = $@ ? 0 : 1; use vars qw(@tab62 @days_per_month $gig); use vars qw($VERSION); use vars qw($COLUMN_WIDTHS); +use vars qw($WEEK $DAY $HOUR $MINUTE); @tab62 = @@ -411,52 +594,92 @@ use vars qw($COLUMN_WIDTHS); @days_per_month = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334); $gig = 1024 * 1024 * 1024; -$VERSION = '1.31'; +$VERSION = '1.61'; # How much space do we allow for the Hosts/Domains/Emails/Edomains column headers? $COLUMN_WIDTHS = 8; +$MINUTE = 60; +$HOUR = 60 * $MINUTE; +$DAY = 24 * $HOUR; +$WEEK = 7 * $DAY; + # Declare global variables. use vars qw($total_received_data $total_received_data_gigs $total_received_count); -use vars qw($total_delivered_data $total_delivered_data_gigs $total_delivered_count); -use vars qw(%arrival_time %size %from_host %from_address); -use vars qw(%timestamp2time); #Hash of timestamp => time. -use vars qw($last_timestamp $last_time); #The last time convertion done. -use vars qw($last_date $date_seconds); #The last date convertion done. -use vars qw($last_offset $offset_seconds); #The last time offset convertion done. +use vars qw($total_delivered_data $total_delivered_data_gigs $total_delivered_messages $total_delivered_addresses); +use vars qw(%timestamp2time); #Hash of timestamp => time. +use vars qw($last_timestamp $last_time); #The last time conversion done. +use vars qw($last_date $date_seconds); #The last date conversion done. +use vars qw($last_offset $offset_seconds); #The last time offset conversion done. use vars qw($localtime_offset); -use vars qw($i); #General loop counter. -use vars qw($debug); #Debug mode? -use vars qw($ntopchart); #How many entries should make it into the chart? -use vars qw($gddirectory); #Where to put files from GD::Graph +use vars qw($i); #General loop counter. +use vars qw($debug); #Debug mode? +use vars qw($ntopchart); #How many entries should make it into the chart? 
+use vars qw($gddirectory); #Where to put files from GD::Graph + +# SpamAssassin variables +use vars qw($spam_score $spam_score_gigs); +use vars qw($ham_score $ham_score_gigs); +use vars qw(%ham_count_by_ip %spam_count_by_ip); +use vars qw(%rejected_count_by_ip %rejected_count_by_reason); +use vars qw(%temporarily_rejected_count_by_ip %temporarily_rejected_count_by_reason); + +#For use in Spreadsheet::WriteExcel +use vars qw($workbook $ws_global $ws_relayed $ws_errors); +use vars qw($row $col $row_hist $col_hist); +use vars qw($run_hist); +use vars qw($f_default $f_header1 $f_header2 $f_header2_m $f_headertab $f_percent); #Format Header + +# Output FileHandles +use vars qw($txt_fh $htm_fh $xls_fh); $ntopchart = 5; # The following are parameters whose values are # set by command line switches: use vars qw($show_errors $show_relay $show_transport $transport_pattern); -use vars qw($topcount $local_league_table $include_remote_users); -use vars qw($hist_opt $hist_interval $hist_number $volume_rounding); -use vars qw($relay_pattern @queue_times $html @user_patterns @user_descriptions); +use vars qw($topcount $local_league_table $include_remote_users $do_local_domain); +use vars qw($hist_opt $hist_interval $hist_number $volume_rounding $emptyOK); +use vars qw($relay_pattern @queue_times @user_patterns @user_descriptions); +use vars qw(@rcpt_times @delivery_times); +use vars qw($include_original_destination); +use vars qw($txt_fh $htm_fh $xls_fh); use vars qw(%do_sender); #Do sender by Host, Domain, Email, and/or Edomain tables. use vars qw($charts $chartrel $chartdir $charts_option_specified); -use vars qw($merge_reports); #Merge old reports ? +use vars qw($merge_reports); #Merge old reports ? # The following are modified in the parse() routine, and # referred to in the print_*() routines. 
-use vars qw($queue_more_than $delayed_count $relayed_unshown $begin $end); +use vars qw($delayed_count $relayed_unshown $begin $end); +use vars qw(%messages @message); use vars qw(%received_count %received_data %received_data_gigs); -use vars qw(%delivered_count %delivered_data %delivered_data_gigs); +use vars qw(%delivered_messages %delivered_data %delivered_data_gigs %delivered_addresses); use vars qw(%received_count_user %received_data_user %received_data_gigs_user); -use vars qw(%delivered_count_user %delivered_data_user %delivered_data_gigs_user); +use vars qw(%delivered_messages_user %delivered_addresses_user %delivered_data_user %delivered_data_gigs_user); +use vars qw(%delivered_messages_local_domain %delivered_addresses_local_domain %delivered_data_local_domain %delivered_data_gigs_local_domain); use vars qw(%transported_count %transported_data %transported_data_gigs); -use vars qw(%remote_delivered %relayed %delayed %had_error %errors_count); -use vars qw(@queue_bin @remote_queue_bin @received_interval_count @delivered_interval_count); -use vars qw(@user_pattern_totals); +use vars qw(%relayed %errors_count $message_errors); +use vars qw(@qt_all_bin @qt_remote_bin); +use vars qw($qt_all_overflow $qt_remote_overflow); +use vars qw(@dt_all_bin @dt_remote_bin %rcpt_times_bin); +use vars qw($dt_all_overflow $dt_remote_overflow %rcpt_times_overflow); +use vars qw(@received_interval_count @delivered_interval_count); +use vars qw(@user_pattern_totals @user_pattern_interval_count); use vars qw(%report_totals); +# Enumerations +use vars qw($SIZE $FROM_HOST $FROM_ADDRESS $ARRIVAL_TIME $REMOTE_DELIVERED $PROTOCOL); +use vars qw($DELAYED $HAD_ERROR); +$SIZE = 0; +$FROM_HOST = 1; +$FROM_ADDRESS = 2; +$ARRIVAL_TIME = 3; +$REMOTE_DELIVERED = 4; +$DELAYED = 5; +$HAD_ERROR = 6; +$PROTOCOL = 7; @@ -464,22 +687,51 @@ use vars qw(%report_totals); # Subroutines # ################################################## +####################################################################### +# get_filehandle($file,\%output_files); +# Return a filehandle writing to $file. +# +# If %output_files is defined, check that $output_files{$file} +# doesn't exist and die if it does, or set it if it doesn't. +####################################################################### +sub get_filehandle { + my($file,$output_files_href) = @_; -=head2 volume_rounded(); + $file = '-' if ($file eq ''); - $rounded_volume = volume_rounded($bytes,$gigabytes); + if (defined $output_files_href) { + die "You can only output to '$file' once! Use -h for help.\n" if exists $output_files_href->{$file}; + $output_files_href->{$file} = 1; + } -Given a data size in bytes, round it to KB, MB, or GB -as appropriate. + if ($file eq '-') { + return \*STDOUT; + } -Eg 12000 => 12KB, 15000000 => 14GB, etc. + if (-e $file) { + unlink $file or die "Failed to rm $file: $!"; + } -Note: I've experimented with Math::BigInt and it results in a 33% -performance degredation as opposed to storing numbers split into -bytes and gigabytes. + my $fh = new IO::File $file, O_WRONLY|O_CREAT|O_EXCL; + die "new IO::File $file failed: $!" unless (defined $fh); + return $fh; +} -=cut +####################################################################### +# volume_rounded(); +# +# $rounded_volume = volume_rounded($bytes,$gigabytes); +# +# Given a data size in bytes, round it to KB, MB, or GB +# as appropriate. +# +# Eg 12000 => 12KB, 15000000 => 14GB, etc. 
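#
# A quick usage sketch (illustrative values; the second argument is the
# gigabytes half of the split bytes/gigabytes representation noted below):
#   volume_rounded(12000, 0);             # "12KB"
#   volume_rounded(3 * 1024 * 1024, 0);   # "3MB"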
+# +# Note: I've experimented with Math::BigInt and it results in a 33% +# performance degredation as opposed to storing numbers split into +# bytes and gigabytes. +####################################################################### sub volume_rounded { my($x,$g) = @_; $x = 0 unless $x; @@ -515,28 +767,33 @@ sub volume_rounded { } else { # We don't want any rounding to be done. - $rounded = sprintf("%4d", ($g * $gig) + $x); + # and we don't need broken formatted output which on one hand avoids numbers from + # being interpreted as string by Spreadsheet Calculators, on the other hand + # breaks if more than 4 digits! -> flexible length instead of fixed length + # Format the return value at the output routine! -fh + #$rounded = sprintf("%d", ($g * $gig) + $x); + no integer; + $rounded = sprintf("%.0f", ($g * $gig) + $x); } return $rounded; } -=head2 un_round(); - - un_round($rounded_volume,\$bytes,\$gigabytes); - -Given a volume in KB, MB or GB, as generated by volume_rounded(), -do the reverse transformation and convert it back into Bytes and Gigabytes. -These are added to the $bytes and $gigabytes parameters. - -Given a data size in bytes, round it to KB, MB, or GB -as appropriate. - -EG: 500 => (500,0), 14GB => (0,14), etc. - -=cut - +####################################################################### +# un_round(); +# +# un_round($rounded_volume,\$bytes,\$gigabytes); +# +# Given a volume in KB, MB or GB, as generated by volume_rounded(), +# do the reverse transformation and convert it back into Bytes and Gigabytes. +# These are added to the $bytes and $gigabytes parameters. +# +# Given a data size in bytes, round it to KB, MB, or GB +# as appropriate. +# +# EG: 500 => (500,0), 14GB => (0,14), etc. +####################################################################### sub un_round { my($rounded,$bytes_sref,$gigabytes_sref) = @_; @@ -552,7 +809,9 @@ sub un_round { $$bytes_sref += ($1 % (1024 * 1024) * 1024); } elsif ($rounded =~ /(\d+)/) { - $$gigabytes_sref += $1 / $gig; + # We need to turn off integer in case we are merging an -nvr report. + no integer; + $$gigabytes_sref += int($1 / $gig); $$bytes_sref += $1 % $gig; } @@ -561,40 +820,37 @@ sub un_round { } -=head2 add_volume(); - - add_volume(\$bytes,\$gigs,$size); - -Add $size to $bytes/$gigs where this is a number split into -bytes ($bytes) and gigabytes ($gigs). This is significantly -faster than using Math::BigInt. - -=cut - +####################################################################### +# add_volume(); +# +# add_volume(\$bytes,\$gigs,$size); +# +# Add $size to $bytes/$gigs where this is a number split into +# bytes ($bytes) and gigabytes ($gigs). This is significantly +# faster than using Math::BigInt. +####################################################################### sub add_volume { -my($bytes_ref,$gigs_ref,$size) = @_; -$$bytes_ref = 0 if ! defined $$bytes_ref; -$$gigs_ref = 0 if ! defined $$gigs_ref; -$$bytes_ref += $size; -while ($$bytes_ref > $gig) - { - $$gigs_ref++; - $$bytes_ref -= $gig; + my($bytes_ref,$gigs_ref,$size) = @_; + $$bytes_ref = 0 if ! defined $$bytes_ref; + $$gigs_ref = 0 if ! defined $$gigs_ref; + $$bytes_ref += $size; + while ($$bytes_ref > $gig) { + $$gigs_ref++; + $$bytes_ref -= $gig; } } -=head2 format_time(); - - $formatted_time = format_time($seconds); - -Given a time in seconds, break it down into -weeks, days, hours, minutes, and seconds. 
- -Eg 12005 => 3h20m5s - -=cut - +####################################################################### +# format_time(); +# +# $formatted_time = format_time($seconds); +# +# Given a time in seconds, break it down into +# weeks, days, hours, minutes, and seconds. +# +# Eg 12005 => 3h20m5s +####################################################################### sub format_time { my($t) = pop @_; my($s) = $t % 60; @@ -615,21 +871,20 @@ $p; } -=head2 unformat_time(); - - $seconds = unformat_time($formatted_time); - -Given a time in weeks, days, hours, minutes, or seconds, convert it to seconds. - -Eg 3h20m5s => 12005 - -=cut - +####################################################################### +# unformat_time(); +# +# $seconds = unformat_time($formatted_time); +# +# Given a time in weeks, days, hours, minutes, or seconds, convert it to seconds. +# +# Eg 3h20m5s => 12005 +####################################################################### sub unformat_time { - my($formated_time) = pop @_; + my($formatted_time) = pop @_; my $time = 0; - while ($formated_time =~ s/^(\d+)([wdhms]?)//) { + while ($formatted_time =~ s/^(\d+)([wdhms]?)//) { $time += $1 if ($2 eq '' || $2 eq 's'); $time += $1 * 60 if ($2 eq 'm'); $time += $1 * 60 * 60 if ($2 eq 'h'); @@ -640,40 +895,40 @@ sub unformat_time { } -=head2 seconds(); - - $time = seconds($timestamp); - -Given a time-of-day timestamp, convert it into a time() value using -POSIX::mktime. We expect the timestamp to be of the form -"$year-$mon-$day $hour:$min:$sec", with month going from 1 to 12, -and the year to be absolute (we do the necessary conversions). The -timestamp may be followed with an offset from UTC like "+$hh$mm"; if the -offset is not present, and we have not been told that the log is in UTC -(with the -utc option), then we adjust the time by the current local -time offset so that it can be compared with the time recorded in message -IDs, which is UTC. - -To improve performance, we only use mktime on the date ($year-$mon-$day), -and only calculate it if the date is different to the previous time we -came here. We then add on seconds for the '$hour:$min:$sec'. - -We also store the results of the last conversion done, and only -recalculate if the date is different. - -We used to have the '-cache' flag which would store the results of the -mktime() call. However, the current way of just using mktime() on the -date obsoletes this. - -=cut - +####################################################################### +# seconds(); +# +# $time = seconds($timestamp); +# +# Given a time-of-day timestamp, convert it into a time() value using +# POSIX::mktime. We expect the timestamp to be of the form +# "$year-$mon-$day $hour:$min:$sec", with month going from 1 to 12, +# and the year to be absolute (we do the necessary conversions). The +# seconds value can be followed by decimals, which we ignore. The +# timestamp may be followed with an offset from UTC like "+$hh$mm"; if the +# offset is not present, and we have not been told that the log is in UTC +# (with the -utc option), then we adjust the time by the current local +# time offset so that it can be compared with the time recorded in message +# IDs, which is UTC. +# +# To improve performance, we only use mktime on the date ($year-$mon-$day), +# and only calculate it if the date is different to the previous time we +# came here. We then add on seconds for the '$hour:$min:$sec'. +# +# We also store the results of the last conversion done, and only +# recalculate if the date is different. 
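#
# An illustrative sketch of the mktime-on-the-date-only approach described
# above (the date and time-of-day here are made up):
#   my($year, $mon, $day) = (2017, 1, 14);
#   my $date_seconds = POSIX::mktime(0, 0, 0, $day, $mon - 1, $year - 1900);
#   my $time = $date_seconds + (13 * 3600) + (37 * 60) + 7;   # 13:37:07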
+# +# We used to have the '-cache' flag which would store the results of the +# mktime() call. However, the current way of just using mktime() on the +# date obsoletes this. +####################################################################### sub seconds { my($timestamp) = @_; # Is the timestamp the same as the last one? return $last_time if ($last_timestamp eq $timestamp); - return 0 unless ($timestamp =~ /^((\d{4})\-(\d\d)-(\d\d))\s(\d\d):(\d\d):(\d\d)( ([+-])(\d\d)(\d\d))?/o); + return 0 unless ($timestamp =~ /^((\d{4})\-(\d\d)-(\d\d))\s(\d\d):(\d\d):(\d\d)(?:\.\d+)?( ([+-])(\d\d)(\d\d))?/o); unless ($last_date eq $1) { $last_date = $1; @@ -684,8 +939,8 @@ sub seconds { } my $time = $date_seconds + ($5 * 3600) + ($6 * 60) + $7; - # SC. Use cacheing. Also note we want seconds not minutes. - #my($this_offset) = ($10 * 60 + $11) * ($9 . "1") if defined $8; + # SC. Use caching. Also note we want seconds not minutes. + #my($this_offset) = ($10 * 60 + $12) * ($9 . "1") if defined $8; if (defined $8 && ($8 ne $last_offset)) { $last_offset = $8; $offset_seconds = ($10 * 60 + $11) * 60; @@ -693,7 +948,7 @@ sub seconds { } - if (defined $7) { + if (defined $8) { #$time -= $this_offset; $time -= $offset_seconds; } elsif (defined $localtime_offset) { @@ -708,14 +963,13 @@ sub seconds { } -=head2 id_seconds(); - - $time = id_seconds($message_id); - -Given a message ID, convert it into a time() value. - -=cut - +####################################################################### +# id_seconds(); +# +# $time = id_seconds($message_id); +# +# Given a message ID, convert it into a time() value. +####################################################################### sub id_seconds { my($sub_id) = substr((pop @_), 0, 6); my($s) = 0; @@ -724,20 +978,58 @@ while($#c >= 0) { $s = $s * 62 + $tab62[ord(shift @c) - ord('0')] } $s; } +####################################################################### +# wdhms_seconds(); +# +# $seconds = wdhms_seconds($string); +# +# Convert a string in a week/day/hour/minute/second format (eg 4h10s) +# into seconds. +####################################################################### +sub wdhms_seconds { + if ($_[0] =~ /^(?:(\d+)w)?(?:(\d+)d)?(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s)?/) { + return((($1||0) * $WEEK) + (($2||0) * $DAY) + (($3||0) * $HOUR) + (($4||0) * $MINUTE) + ($5||0)); + } + return undef; +} +####################################################################### +# queue_time(); +# +# $queued = queue_time($completed_tod, $arrival_time, $id); +# +# Given the completed time of day and either the arrival time +# (preferred), or the message ID, calculate how long the message has +# been on the queue. +# +####################################################################### +sub queue_time { + my($completed_tod, $arrival_time, $id) = @_; + + # Note: id_seconds() benchmarks as 42% slower than seconds() + # and computing the time accounts for a significant portion of + # the run time. + if (defined $arrival_time) { + return(seconds($completed_tod) - seconds($arrival_time)); + } + else { + return(seconds($completed_tod) - id_seconds($id)); + } +} -=head2 calculate_localtime_offset(); - - $localtime_offset = calculate_localtime_offset(); - -Calculate the the localtime offset from gmtime in seconds. - - $localtime = time() + $localtime_offset. - -These are the same semantics as ISO 8601 and RFC 2822 timezone offsets. -(West is negative, East is positive.) 
-=cut +####################################################################### +# calculate_localtime_offset(); +# +# $localtime_offset = calculate_localtime_offset(); +# +# Calculate the the localtime offset from gmtime in seconds. +# +# $localtime = time() + $localtime_offset. +# +# These are the same semantics as ISO 8601 and RFC 2822 timezone offsets. +# (West is negative, East is positive.) +####################################################################### # $localtime = gmtime() + $localtime_offset. OLD COMMENT # This subroutine commented out as it's not currently in use. @@ -762,158 +1054,212 @@ sub calculate_localtime_offset { } -=head2 print_queue_times(); - - $time = print_queue_times($message_type,\@queue_times,$queue_more_than); -Given the type of messages being output, the array of message queue times, -and the number of messages which exceeded the queue times, print out -a table. - -=cut - -sub print_queue_times { +####################################################################### +# print_duration_table(); +# +# print_duration_table($title, $message_type, \@times, \@values, $overflow); +# +# Print a table showing how long a particular step took for +# the messages. The parameters are: +# $title Eg "Time spent on the queue" +# $message_type Eg "Remote" +# \@times The maximum time a message took for it to increment +# the corresponding @values counter. +# \@values An array of message counters. +# $overflow The number of messages which exceeded the maximum +# time. +####################################################################### +sub print_duration_table { no integer; -my($string,$array,$queue_more_than) = @_; +my($title, $message_type, $times_aref, $values_aref, $overflow) = @_; my(@chartdatanames); my(@chartdatavals); my $printed_one = 0; my $cumulative_percent = 0; -#$queue_unknown += keys %arrival_time; -my $queue_total = $queue_more_than; -for ($i = 0; $i <= $#queue_times; $i++) { $queue_total += $$array[$i] } +my $queue_total = $overflow; +map {$queue_total += $_} @$values_aref; + +my $temp = "$title: $message_type"; -my $temp = "Time spent on the queue: $string"; -my($format); -if ($html) { - print "

$temp

\n"; - print "\n"; - print "
\n"; - print "\n"; - print "\n"; - $format = "\n"; +my $txt_format = "%5s %4s %6d %5.1f%% %5.1f%%\n"; +my $htm_format = "\n"; + +# write header +printf $txt_fh ("%s\n%s\n\n", $temp, "-" x length($temp)) if $txt_fh; +if ($htm_fh) { + print $htm_fh "

$temp

\n"; + print $htm_fh "
TimeMessagesPercentageCumulative Percentage
%s %s%d%5.1f%%%5.1f%%
%s %s%d%5.1f%%%5.1f%%
\n"; + print $htm_fh "\n"; } -else -{ - printf("%s\n%s\n\n", $temp, "-" x length($temp)); - $format = "%5s %4s %6d %5.1f%% %5.1f%%\n"; +if ($xls_fh) { + $ws_global->write($row++, $col, "$title: ".$message_type, $f_header2); + my @content=("Time", "Messages", "Percentage", "Cumulative Percentage"); + &set_worksheet_line($ws_global, $row++, 1, \@content, $f_headertab); } -for ($i = 0; $i <= $#queue_times; $i++) { - if ($$array[$i] > 0) + +for ($i = 0; $i <= $#$times_aref; ++$i) { + if ($$values_aref[$i] > 0) { - my $percent = ($$array[$i] * 100)/$queue_total; + my $percent = ($values_aref->[$i] * 100)/$queue_total; $cumulative_percent += $percent; - printf($format, - $printed_one? " " : "Under", - format_time($queue_times[$i]), - $$array[$i], $percent, $cumulative_percent); - if (!defined($queue_times[$i])) { - print "Not defined"; + + my @content=($printed_one? " " : "Under", + format_time($times_aref->[$i]), + $values_aref->[$i], $percent, $cumulative_percent); + + if ($htm_fh) { + printf $htm_fh ($htm_format, @content); + if (!defined($values_aref->[$i])) { + print $htm_fh "Not defined"; + } + } + if ($txt_fh) { + printf $txt_fh ($txt_format, @content); + if (!defined($times_aref->[$i])) { + print $txt_fh "Not defined"; + } + } + if ($xls_fh) + { + no integer; + &set_worksheet_line($ws_global, $row, 0, [@content[0,1,2]], $f_default); + &set_worksheet_line($ws_global, $row++, 3, [$content[3]/100,$content[4]/100], $f_percent); + + if (!defined($times_aref->[$i])) { + $col=0; + $ws_global->write($row++, $col, "Not defined" ); + } } + push(@chartdatanames, - ($printed_one? "" : "Under") . format_time($queue_times[$i])); - push(@chartdatavals, $$array[$i]); + ($printed_one? "" : "Under") . format_time($times_aref->[$i])); + push(@chartdatavals, $$values_aref[$i]); $printed_one = 1; } } -if ($queue_more_than > 0) { - my $percent = ($queue_more_than * 100)/$queue_total; +if ($overflow && $overflow > 0) { + my $percent = ($overflow * 100)/$queue_total; $cumulative_percent += $percent; - printf($format, - "Over ", - format_time($queue_times[$#queue_times]), - $queue_more_than, $percent, $cumulative_percent); + + my @content = ("Over ", format_time($times_aref->[-1]), + $overflow, $percent, $cumulative_percent); + + printf $txt_fh ($txt_format, @content) if $txt_fh; + printf $htm_fh ($htm_format, @content) if $htm_fh; + if ($xls_fh) + { + &set_worksheet_line($ws_global, $row, 0, [@content[0,1,2]], $f_default); + &set_worksheet_line($ws_global, $row++, 3, [$content[3]/100,$content[4]/100], $f_percent); + } + } -push(@chartdatanames, "Over " . format_time($queue_times[$#queue_times])); -push(@chartdatavals, $queue_more_than); + +push(@chartdatanames, "Over " . format_time($times_aref->[-1])); +push(@chartdatavals, $overflow); #printf("Unknown %6d\n", $queue_unknown) if $queue_unknown > 0; -if ($html) { - print "
TimeMessagesPercentageCumulative Percentage
\n"; - print "
\n"; +if ($htm_fh) { + print $htm_fh "
"; - if ($HAVE_GD_Graph_pie && $charts) { + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals > 0)) { my @data = ( \@chartdatanames, \@chartdatavals ); my $graph = GD::Graph::pie->new(200, 200); - my $pngname; - my $title; - if ($string =~ /all/) { $pngname = "queue_all.png"; $title = "Queue (all)"; } - if ($string =~ /remote/) { $pngname = "queue_rem.png"; $title = "Queue (remote)"; } - $graph->set( - title => $title, - ); + my $pngname = "$title-$message_type.png"; + $pngname =~ s/[^\w\-\.]/_/; + + my $graph_title = "$title ($message_type)"; + $graph->set(title => $graph_title) if (length($graph_title) < 21); + my $gd = $graph->plot(\@data) or warn($graph->error); if ($gd) { - open(IMG, ">$chartdir/$pngname") or die $!; + open(IMG, ">$chartdir/$pngname") or die "Could not write $chartdir/$pngname: $!\n"; binmode IMG; print IMG $gd->png; close IMG; - print ""; + print $htm_fh ""; } } - print "
\n"; -} -print "\n"; + print $htm_fh "\n"; } +if ($xls_fh) +{ + $row++; +} +print $txt_fh "\n" if $txt_fh; +print $htm_fh "\n" if $htm_fh; +} -=head2 print_histogram(); - - print_histogram('Deliverieds|Messages received',@interval_count); - -Print a histogram of the messages delivered/received per time slot -(hour by default). - -=cut +####################################################################### +# print_histogram(); +# +# print_histogram('Deliveries|Messages received|$pattern', $unit, @interval_count); +# +# Print a histogram of the messages delivered/received per time slot +# (hour by default). +####################################################################### sub print_histogram { -my($text) = shift; -my(@interval_count) = @_; +my($text, $unit, @interval_count) = @_; my(@chartdatanames); my(@chartdatavals); my($maxd) = 0; + +# save first row of print_histogram for xls output +if (!$run_hist) { + $row_hist = $row; +} +else { + $row = $row_hist; +} + for ($i = 0; $i < $hist_number; $i++) { $maxd = $interval_count[$i] if $interval_count[$i] > $maxd; } my $scale = int(($maxd + 25)/50); $scale = 1 if $scale == 0; -my($type); -if ($text eq "Deliveries") - { - $type = ($scale == 1)? "delivery" : "deliveries"; - } -else - { - $type = ($scale == 1)? "message" : "messages"; +if ($scale != 1) { + if ($unit !~ s/y$/ies/) { + $unit .= 's'; } +} -my($title) = sprintf("$text per %s (each dot is $scale $type)", - ($hist_interval == 60)? "hour" : - ($hist_interval == 1)? "minute" : "$hist_interval minutes"); +# make and output title +my $title = sprintf("$text per %s", + ($hist_interval == 60)? "hour" : + ($hist_interval == 1)? "minute" : "$hist_interval minutes"); -if ($html) { - print "
<hr><h2>$title</h2>\n";
-  print "<table border=0 width=\"100%\">\n";
-  print "<tr><td><pre>\n";
+my $txt_htm_title = $title . " (each dot is $scale $unit)";
+
+printf $txt_fh ("%s\n%s\n\n", $txt_htm_title, "-" x length($txt_htm_title)) if $txt_fh;
+
+if ($htm_fh) {
+  print $htm_fh "<hr><h2>$txt_htm_title</h2>\n";
+  print $htm_fh "<table border=0 width=\"100%\">\n";
+  print $htm_fh "<tr><td><pre>\n";
 }
-else {
-  printf("%s\n%s\n\n", $title, "-" x length($title));
+
+if ($xls_fh) {
+  $title =~ s/Messages/Msg/ ;
+  $row += 2;
+  $ws_global->write($row++, $col_hist+1, $title, $f_headertab);
 }
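# For reference, a minimal standalone sketch of the Spreadsheet::WriteExcel
# calls used above; the real $workbook/$ws_global/$f_* objects are created
# elsewhere in the script in much the same way, and the file name here is
# made up:
use Spreadsheet::WriteExcel;
my $example_wb  = Spreadsheet::WriteExcel->new('eximstats-example.xls');
my $example_ws  = $example_wb->add_worksheet();
my $example_hdr = $example_wb->add_format(bold => 1);
$example_ws->write(0, 0, ['Time', 'Messages'], $example_hdr);  # an array ref fills a row
$example_ws->write(1, 0, ['00-01', 42]);
$example_wb->close();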
 
+
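# A worked example of the dot scaling used in the loop below (the figures
# are made up): with a busiest slot of $maxd = 480 messages, the code above
# set $scale = int((480 + 25) / 50) = 10, so a slot holding 73 messages is
# drawn as seven dots:
my $example_dots = "." x (73 / 10);   # "......." (integer division, as "use integer" is in force)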
 my $hour = 0;
 my $minutes = 0;
-for ($i = 0; $i < $hist_number; $i++)
-  {
+for ($i = 0; $i < $hist_number; $i++) {
   my $c = $interval_count[$i];
 
   # If the interval is an hour (the maximum) print the starting and
@@ -921,37 +1267,59 @@ for ($i = 0; $i < $hist_number; $i++)
   # minutes, which take up the same space.
 
   my $temp;
-  if ($hist_opt == 1)
-    {
+  if ($hist_opt == 1) {
     $temp = sprintf("%02d-%02d", $hour, $hour + 1);
-    print $temp;
+
+    print $txt_fh $temp if $txt_fh;
+    print $htm_fh $temp if $htm_fh;
+
+    if ($xls_fh) {
+      if ($run_hist==0) {
+        # only on first run
+        $ws_global->write($row, 0, [$temp], $f_default);
+      }
+    }
+
     push(@chartdatanames, $temp);
     $hour++;
-    }
-  else
-    {
+  }
+  else {
     if ($minutes == 0)
       { $temp = sprintf("%02d:%02d", $hour, $minutes) }
     else
       { $temp = sprintf("  :%02d", $minutes) }
-    print $temp;
+
+    print $txt_fh $temp if $txt_fh;
+    print $htm_fh $temp if $htm_fh;
+    if (($xls_fh) and ($run_hist==0)) {
+      # only on first run
+      $temp = sprintf("%02d:%02d", $hour, $minutes);
+      $ws_global->write($row, 0, [$temp], $f_default);
+    }
+
     push(@chartdatanames, $temp);
     $minutes += $hist_interval;
-    if ($minutes >= 60)
-      {
+    if ($minutes >= 60) {
       $minutes = 0;
       $hour++;
-      }
     }
-  push(@chartdatavals, $c);
-  printf(" %6d %s\n", $c, "." x ($c/$scale));
   }
-print "\n";
-if ($html)
-  {
-  print "
\n"; - print "
\n"; - if ($HAVE_GD_Graph_linespoints && $charts) { + push(@chartdatavals, $c); + + printf $txt_fh (" %6d %s\n", $c, "." x ($c/$scale)) if $txt_fh; + printf $htm_fh (" %6d %s\n", $c, "." x ($c/$scale)) if $htm_fh; + $ws_global->write($row++, $col_hist+1, [$c], $f_default) if $xls_fh; + +} #end for + +printf $txt_fh "\n" if $txt_fh; +printf $htm_fh "\n" if $htm_fh; + +if ($htm_fh) +{ + print $htm_fh "\n"; + print $htm_fh "\n"; + if ($HAVE_GD_Graph_linespoints && $charts && ($#chartdatavals > 0)) { # calculate the graph my @data = ( \@chartdatanames, @@ -964,211 +1332,292 @@ if ($html) title => $text, x_labels_vertical => 1 ); - my($pngname); - if ($text =~ /Deliveries/) { $pngname = "histogram_del.png"; } - if ($text =~ /Messages/) { $pngname = "histogram_mes.png"; } + my $pngname = "histogram_$text.png"; + $pngname =~ s/[^\w\._]/_/g; + my $gd = $graph->plot(\@data) or warn($graph->error); if ($gd) { - open(IMG, ">$chartdir/$pngname") or die $!; + open(IMG, ">$chartdir/$pngname") or die "Could not write $chartdir/$pngname: $!\n"; binmode IMG; print IMG $gd->png; close IMG; - print ""; + print $htm_fh ""; } } - print "
\n"; + print $htm_fh "
\n"; } -} - +$col_hist++; # where to continue next times -=head2 print_league_table(); - - print_league_table($league_table_type,\%message_count,\%message_data,\%message_data_gigs); +$row+=2; # leave some space after history block +$run_hist=1; # we have done this once or more +} -Given hashes of message count and message data, which are keyed by -the table type (eg by the sending host), print a league table -showing the top $topcount (defaults to 50). -=cut +####################################################################### +# print_league_table(); +# +# print_league_table($league_table_type,\%message_count,\%address_count,\%message_data,\%message_data_gigs, $spreadsheet, $row_sref); +# +# Given hashes of message count, address count, and message data, +# which are keyed by the table type (eg by the sending host), print a +# league table showing the top $topcount (defaults to 50). +####################################################################### sub print_league_table { -my($text,$m_count,$m_data,$m_data_gigs) = @_; -my($name) = ($topcount == 1)? "$text" : "$topcount ${text}s"; -my($temp) = "Top $name by message count"; -my(@chartdatanames) = (); -my(@chartdatavals) = (); -my $chartotherval = 0; - -my($format); -if ($html) { - print "

$temp

\n"; - print "\n"; - print ""; $sender_txt_header .= " " x ($COLUMN_WIDTHS - length($_)) . $_ . 's'; - $sender_html_format .= ""; - $sender_txt_format .= " " x ($COLUMN_WIDTHS - 5) . "%6d"; + $sender_html_format .= ""; + $sender_txt_format .= " " x ($COLUMN_WIDTHS - 5) . "%6s"; + push(@col_headers,"${_}s"); } - my($format1,$format2); - if ($html) { - print << "EoText"; - -

Grand total summary

-
\n"; - print "\n"; - print "\n"; + my($text,$m_count,$a_count,$m_data,$m_data_gigs,$spreadsheet, $row_sref) = @_; + my($name) = ($topcount == 1)? "$text" : "$topcount ${text}s"; + my($title) = "Top $name by message count"; + my(@chartdatanames) = (); + my(@chartdatavals) = (); + my $chartotherval = 0; + $text = ucfirst($text); # Align non-local addresses to the right (so all the .com's line up). # Local addresses are aligned on the left as they are userids. my $align = ($text !~ /local/i) ? 'right' : 'left'; - $format = "\n"; -} -else { - printf("%s\n%s\n\n", $temp, "-" x length($temp)); - $format = "%7d %10s %s\n"; -} -my($key,$htmlkey); -foreach $key (top_n_sort($topcount,$m_count,$m_data_gigs,$m_data)) { - if ($html) { - $htmlkey = $key; - $htmlkey =~ s/>/\>\;/g; - $htmlkey =~ s/\n"; - print "
MessagesBytes\u$text
%d%s%s\n"; - if ($HAVE_GD_Graph_pie && $charts) - { - # calculate the graph - my @data = ( - \@chartdatanames, - \@chartdatavals - ); - my $graph = GD::Graph::pie->new(300, 300); - $graph->set( - x_label => 'Name', - y_label => 'Amount', - title => 'By count', - ); - my $gd = $graph->plot(\@data) or warn($graph->error); - if ($gd) { - my $temp = $text; - $temp =~ s/ /_/g; - open(IMG, ">$chartdir/${temp}_count.png") or die $!; - binmode IMG; - print IMG $gd->png; - close IMG; - print ""; - } - } - print "\n"; - print "
\n"; -} -print "\n"; - -$temp = "Top $name by volume"; -if ($html) { - print "

$temp

\n"; - print "\n"; - print "
\n"; - print "\n"; - print "\n"; -} -else { - printf("%s\n%s\n\n", $temp, "-" x length($temp)); -} + ################################################ + # Generate the printf formats and table headers. + ################################################ + my(@headers) = ('Messages'); + #push(@headers,'Addresses') if defined $a_count; + push(@headers,'Addresses') if defined $a_count && %$a_count; + push(@headers,'Bytes','Average') if defined $m_data; + + my $txt_format = "%10s " x @headers . " %s\n"; + my $txt_col_headers = sprintf $txt_format, @headers, $text; + my $htm_format = "" . ''x@headers . "\n"; + my $htm_col_headers = sprintf $htm_format, @headers, $text; + $htm_col_headers =~ s/(<\/?)td/$1th/g; #Convert
MessagesBytes\u$text
%s%s
's to 's for the header. -@chartdatanames = (); -@chartdatavals = (); -$chartotherval = 0; -foreach $key (top_n_sort($topcount,$m_data_gigs,$m_data,$m_count)) { - if ($html) { - $htmlkey = $key; - $htmlkey =~ s/>/\>\;/g; - $htmlkey =~ s/

$title

+ +
+ +EoText + print $htm_fh $htm_col_headers } - else { - printf($format, $$m_count{$key}, volume_rounded($$m_data{$key},$$m_data_gigs{$key}), $key); + + if ($xls_fh) { + $spreadsheet->write(${$row_sref}++, 0, $title, $f_header2); + $spreadsheet->write(${$row_sref}++, 0, [@headers, $text], $f_headertab); } - if (scalar @chartdatanames < $ntopchart) - { - push(@chartdatanames, $key); - push(@chartdatavals, $$m_count{$key}); + + # write content + foreach my $key (top_n_sort($topcount,$m_count,$m_data_gigs,$m_data)) { + + # When displaying the average figures, we calculate the average of + # the rounded data, as the user would calculate it. This reduces + # the accuracy slightly, but we have to do it this way otherwise + # when using -merge to convert results from text to HTML and + # vice-versa discrepencies would occur. + my $messages = $$m_count{$key}; + my @content = ($messages); + push(@content, $$a_count{$key}) if defined $a_count; + if (defined $m_data) { + my $rounded_volume = volume_rounded($$m_data{$key},$$m_data_gigs{$key}); + my($data,$gigs) = (0,0); + un_round($rounded_volume,\$data,\$gigs); + my $rounded_average = volume_rounded($data/$messages,$gigs/$messages); + push(@content, $rounded_volume, $rounded_average); } - else - { - $chartotherval += $$m_count{$key}; + + # write content + printf $txt_fh ($txt_format, @content, $key) if $txt_fh; + + if ($htm_fh) { + my $htmlkey = $key; + $htmlkey =~ s/>/\>\;/g; + $htmlkey =~ s/write(${$row_sref}++, 0, [@content, $key], $f_default) if $xls_fh; + + if (scalar @chartdatanames < $ntopchart) { + push(@chartdatanames, $key); + push(@chartdatavals, $$m_count{$key}); + } + else { + $chartotherval += $$m_count{$key}; } } -push(@chartdatanames, "Other"); -push(@chartdatavals, $chartotherval); -if ($html) { - print "
\n"; - print "
\n"; - if ($HAVE_GD_Graph_pie && $charts) { - # calculate the graph - my @data = ( - \@chartdatanames, - \@chartdatavals - ); - my $graph = GD::Graph::pie->new(300, 300); - $graph->set( - x_label => 'Name', - y_label => 'Volume', - title => 'By Volume', - ); - my $gd = $graph->plot(\@data) or warn($graph->error); - if ($gd) { - my $temp = $text; - $temp =~ s/ /_/g; - open(IMG, ">$chartdir/${temp}_volume.png") or die $!; - binmode IMG; - print IMG $gd->png; - close IMG; - print ""; + push(@chartdatanames, "Other"); + push(@chartdatavals, $chartotherval); + + print $txt_fh "\n" if $txt_fh; + if ($htm_fh) { + print $htm_fh "
\n"; + print $htm_fh "
\n"; + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals > 0)) + { + # calculate the graph + my @data = ( + \@chartdatanames, + \@chartdatavals + ); + my $graph = GD::Graph::pie->new(300, 300); + $graph->set( + x_label => 'Name', + y_label => 'Amount', + title => 'By count', + ); + my $gd = $graph->plot(\@data) or warn($graph->error); + if ($gd) { + my $temp = $text; + $temp =~ s/ /_/g; + open(IMG, ">$chartdir/${temp}_count.png") or die "Could not write $chartdir/${temp}_count.png: $!\n"; + binmode IMG; + print IMG $gd->png; + close IMG; + print $htm_fh ""; + } } + print $htm_fh "\n"; + print $htm_fh "
\n\n"; } - print "
\n"; - print "
\n"; -} + ++${$row_sref} if $xls_fh; -print "\n"; -} + if (defined $m_data) { + # write header -=head2 top_n_sort(); + $title = "Top $name by volume"; - @sorted_keys = top_n_sort($n,$href1,$href2,$href3); + printf $txt_fh ("%s\n%s\n%s", $title, "-" x length($title),$txt_col_headers) if $txt_fh; -Given a hash which has numerical values, return the sorted $n keys which -point to the top values. The second and third hashes are used as -tiebreakers. They all must have the same keys. + if ($htm_fh) { + print $htm_fh <

$title

+ +
+ +EoText + print $htm_fh $htm_col_headers; + } + if ($xls_fh) { + $spreadsheet->write(${$row_sref}++, 0, $title, $f_header2); + $spreadsheet->write(${$row_sref}++, 0, [@headers, $text], $f_headertab); + } -The idea behind this routine is that when you only want to see the -top n members of a set, rather than sorting the entire set and then -plucking off the top n, sort through the stack as you go, discarding -any member which is lower than your current n'th highest member. + @chartdatanames = (); + @chartdatavals = (); + $chartotherval = 0; + my $use_gig = 0; + foreach my $key (top_n_sort($topcount,$m_data_gigs,$m_data,$m_count)) { + # The largest volume will be the first (top of the list). + # If it has at least 1 gig, then just use gigabytes to avoid + # risking an integer overflow when generating the pie charts. + if ($$m_data_gigs{$key}) { + $use_gig = 1; + } -This proves to be an order of magnitude faster for large hashes. -On 200,000 lines of mainlog it benchmarked 9 times faster. -On 700,000 lines of mainlog it benchmarked 13.8 times faster. + my $messages = $$m_count{$key}; + my @content = ($messages); + push(@content, $$a_count{$key}) if defined $a_count; + my $rounded_volume = volume_rounded($$m_data{$key},$$m_data_gigs{$key}); + my($data ,$gigs) = (0,0); + un_round($rounded_volume,\$data,\$gigs); + my $rounded_average = volume_rounded($data/$messages,$gigs/$messages); + push(@content, $rounded_volume, $rounded_average ); + + # write content + printf $txt_fh ($txt_format, @content, $key) if $txt_fh; + if ($htm_fh) { + my $htmlkey = $key; + $htmlkey =~ s/>/\>\;/g; + $htmlkey =~ s/write(${$row_sref}++, 0, [@content, $key], $f_default) if $xls_fh; -We assume the values are > 0. -=cut + if (scalar @chartdatanames < $ntopchart) { + if ($use_gig) { + if ($$m_data_gigs{$key}) { + push(@chartdatanames, $key); + push(@chartdatavals, $$m_data_gigs{$key}); + } + } + else { + push(@chartdatanames, $key); + push(@chartdatavals, $$m_data{$key}); + } + } + else { + $chartotherval += ($use_gig) ? $$m_data_gigs{$key} : $$m_data{$key}; + } + } + push(@chartdatanames, "Other"); + push(@chartdatavals, $chartotherval); + + print $txt_fh "\n" if $txt_fh; + if ($htm_fh) { + print $htm_fh "
\n"; + print $htm_fh "
\n"; + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals > 0)) { + # calculate the graph + my @data = ( + \@chartdatanames, + \@chartdatavals + ); + my $graph = GD::Graph::pie->new(300, 300); + $graph->set( + x_label => 'Name', + y_label => 'Volume' , + title => 'By Volume', + ); + my $gd = $graph->plot(\@data) or warn($graph->error); + if ($gd) { + my $temp = $text; + $temp =~ s/ /_/g; + open(IMG, ">$chartdir/${temp}_volume.png") or die "Could not write $chartdir/${temp}_volume.png: $!\n"; + binmode IMG; + print IMG $gd->png; + close IMG; + print $htm_fh ""; + } + } + print $htm_fh "\n"; + print $htm_fh "
\n\n"; + } + + ++${$row_sref} if $xls_fh; + } +} + +####################################################################### +# top_n_sort(); +# +# @sorted_keys = top_n_sort($n,$href1,$href2,$href3); +# +# Given a hash which has numerical values, return the sorted $n keys which +# point to the top values. The second and third hashes are used as +# tiebreakers. They all must have the same keys. +# +# The idea behind this routine is that when you only want to see the +# top n members of a set, rather than sorting the entire set and then +# plucking off the top n, sort through the stack as you go, discarding +# any member which is lower than your current n'th highest member. +# +# This proves to be an order of magnitude faster for large hashes. +# On 200,000 lines of mainlog it benchmarked 9 times faster. +# On 700,000 lines of mainlog it benchmarked 13.8 times faster. +# +# We assume the values are > 0. +####################################################################### sub top_n_sort { my($n,$href1,$href2,$href3) = @_; @@ -1192,6 +1641,12 @@ sub top_n_sort { my $n_minus_1 = $n - 1; my $n_minus_2 = $n - 2; + # Create a dummy hash incase the user has not provided us with + # tiebreaker hashes. + my(%dummy_hash); + $href2 = \%dummy_hash unless defined $href2; + $href3 = \%dummy_hash unless defined $href3; + # Pick out the top $n keys. my($key,$value1,$value2,$value3,$i,$comparison,$insert_position); while (($key,$value1) = each %$href1) { @@ -1199,11 +1654,22 @@ sub top_n_sort { #print STDERR "key $key ($value1,",$href2->{$key},",",$href3->{$key},") <=> ($minimum_value1,$minimum_value2,$minimum_value3)\n"; # Check to see that the new value is bigger than the lowest of the - # top n keys that we're keeping. + # top n keys that we're keeping. We test the main key first, because + # for the majority of cases we can skip creating dummy hash values + # should the user have not provided real tie-breaking hashes. + next unless $value1 >= $minimum_value1; + + # Create a dummy hash entry for the key if required. + # Note that setting the dummy_hash value sets it for both href2 & + # href3. Also note that currently we are guaranteed to have a real + # value for href3 if a real value for href2 exists so don't need to + # test for it as well. + $dummy_hash{$key} = 0 unless exists $href2->{$key}; + $comparison = $value1 <=> $minimum_value1 || - $href2->{$key} <=> $minimum_value2 || - $href3->{$key} <=> $minimum_value3 || - $top_n_key cmp $key; + $href2->{$key} <=> $minimum_value2 || + $href3->{$key} <=> $minimum_value3 || + $top_n_key cmp $key; next unless ($comparison == 1); # As we will be using these values a few times, extract them into scalars. @@ -1227,14 +1693,14 @@ sub top_n_sort { for ($i = 0; $i < $n_minus_1; $i++) { $top_n_key = $top_n_keys[$i]; if ( ($top_n_key eq '_') || - ( ($value1 <=> $href1->{$top_n_key} || + ( ($value1 <=> $href1->{$top_n_key} || $value2 <=> $href2->{$top_n_key} || - $value3 <=> $href3->{$top_n_key} || - $top_n_key cmp $key) == 1 - ) - ) { - $insert_position = $i; - last; + $value3 <=> $href3->{$top_n_key} || + $top_n_key cmp $key) == 1 + ) + ) { + $insert_position = $i; + last; } } @@ -1257,14 +1723,14 @@ sub top_n_sort { } -=head2 html_header(); - - $header = html_header($title); - -Print our HTML header and start the block. - -=cut +####################################################################### +# html_header(); +# +# $header = html_header($title); +# +# Print our HTML header and start the block. 
+####################################################################### sub html_header { my($title) = @_; my $text = << "EoText"; @@ -1282,64 +1748,82 @@ EoText -=head2 help(); - - help(); - -Display usage instructions and exit. - -=cut - +####################################################################### +# help(); +# +# help(); +# +# Display usage instructions and exit. +####################################################################### sub help { print << "EoText"; eximstats Version $VERSION -Usage: eximstats [Options] mainlog1 mainlog2 ... > report.txt - eximstats -html [Options] mainlog1 mainlog2 ... > report.html - eximstats -merge [Options] report.1.txt report.2.txt ... > weekly_rep.txt - eximstats -merge -html [Options] report.1.html ... > weekly_rep.html +Usage: + eximstats [Output] [Options] mainlog1 mainlog2 ... + eximstats -merge -html [Options] report.1.html ... > weekly_rep.html + +Examples: + eximstats -html=eximstats.html mainlog1 mainlog2 ... + eximstats mainlog1 mainlog2 ... > report.txt + +Parses exim mainlog or syslog files and generates a statistical analysis +of the messages processed. -Parses exim mainlog files and generates a statistical analysis of -the messages processed. Valid options are: +Valid output types are: +-txt[=] plain text (default unless no other type is specified) +-html[=] HTML +-xls[=] Excel +With no type and file given, defaults to -txt and STDOUT. +Valid options are: -h histogram divisions per hour. The default is 1, and 0 suppresses histograms. Other valid values are: - 2, 3, 5, 10, 15, 20, 30 or 60. + 2, 3, 5, 10, 15, 20, 30 or 60. -ne don't display error information -nr don't display relaying information -nr/pattern/ don't display relaying information that matches -nt don't display transport information -nt/pattern/ don't display transport information that matches --nvr don't do volume rounding. Display in bytes, not KB/MB/GB. --q list of times for queuing information - single 0 item suppresses +-nvr don't do volume rounding. Display in bytes, not KB/MB/GB. -t display top sources/destinations default is 50, 0 suppresses top listing -tnl omit local sources/destinations in top listing -t_remote_users show top user sources/destinations from non-local domains +-q list of times for queuing information. -q0 suppresses. +-show_rt Show the receipt times for all the messages. +-show_dt Show the delivery times for all the messages. + is an optional list of times in seconds. + Eg -show_rt1,2,4,8. --byhost show results by sending host (default unless bydomain or +-include_original_destination show both the final and original + destinations in the results rather than just the final ones. + +-byhost show results by sending host (default unless bydomain or byemail is specified) --bydomain show results by sending domain. --byemail show results by sender's email address --byedomain show results by sender's email domain +-bydomain show results by sending domain. +-byemail show results by sender's email address +-byedomain show results by sender's email domain +-bylocaldomain show results by local domain -pattern "Description" /pattern/ Count lines matching specified patterns and show them in - the results. It can be specified multiple times. Eg: - -pattern 'Refused connections' '/refused connection/' + the results. It can be specified multiple times. 
Eg: + -pattern 'Refused connections' '/refused connection/' -merge merge previously generated reports into a new report --html output the results in HTML --charts Create charts (this requires the GD::Graph modules) +-charts Create charts (this requires the GD::Graph modules). + Only valid with -html. -chartdir Create the charts' png files in the directory -chartrel Specify the relative directory for the "img src=" tags from where to include the charts in the html file - -chartdir and -chartrel default to '.' + -chartdir and -chartrel default to '.' + +-emptyok It is OK if there is no valid input, don't print an error. --d Debug mode - dump the eval'ed parser onto STDERR. +-d Debug mode - dump the eval'ed parser onto STDERR. EoText @@ -1348,58 +1832,111 @@ EoText -=head2 generate_parser(); - - $parser = generate_parser(); - -This subroutine generates the parsing routine which will be -used to parse the mainlog. We take the base operation, and remove bits not in use. -This improves performance depending on what bits you take out or add. - -I've tested using study(), but this does not improve performance. - -We store our parsing routing in a variable, and process it looking for #IFDEF (Expression) -or #IFNDEF (Expression) statements and corresponding #ENDIF (Expression) statements. If -the expression evaluates to true, then it is included/excluded accordingly. - -=cut - +####################################################################### +# generate_parser(); +# +# $parser = generate_parser(); +# +# This subroutine generates the parsing routine which will be +# used to parse the mainlog. We take the base operation, and remove bits not in use. +# This improves performance depending on what bits you take out or add. +# +# I've tested using study(), but this does not improve performance. +# +# We store our parsing routing in a variable, and process it looking for #IFDEF (Expression) +# or #IFNDEF (Expression) statements and corresponding #ENDIF (Expression) statements. If +# the expression evaluates to true, then it is included/excluded accordingly. +####################################################################### sub generate_parser { my $parser = ' my($ip,$host,$email,$edomain,$domain,$thissize,$size,$old,$new); - my($tod,$m_hour,$m_min,$id,$flag); + my($tod,$m_hour,$m_min,$id,$flag,$extra,$length); + my($seconds,$queued,$rcpt_time,$local_domain); + my $rej_id = 0; while (<$fh>) { - next if length($_) < 38; - # PH/FANF - # next unless /^(\\d{4}\\-\\d\\d-\\d\\d\\s(\\d\\d):(\\d\\d):\\d\\d)/; - next unless /^(\\d{4}\\-\\d\\d-\\d\\d\\s(\\d\\d):(\\d\\d):\\d\\d( [-+]\\d\\d\\d\\d)?)/o; + # Convert syslog lines to mainlog format. + if (! /^\\d{4}/) { + next unless s/^.*? exim\\b.*?: //; + } + + $length = length($_); + next if ($length < 38); + next unless /^ + (\\d{4}\\-\\d\\d-\\d\\d\\s # 1: YYYYMMDD HHMMSS + (\\d\\d) # 2: HH + : + (\\d\\d) # 3: MM + :\\d\\d + ) + (\\.\\d+)? # 4: subseconds + (\s[-+]\\d\\d\\d\\d)? # 5: tz-offset + (\s\\[\\d+\\])? # 6: pid + /ox; + + $tod = defined($5) ? $1 . $5 : $1; + ($m_hour,$m_min) = ($2,$3); + + # PH - watch for GMT offsets in the timestamp. + if (defined($5)) { + $extra = 6; + next if ($length < 44); + } + else { + $extra = 0; + } - ($tod,$m_hour,$m_min) = ($1,$2,$3); + # watch for subsecond precision + if (defined($4)) { + $extra += length($4); + next if ($length < 38 + $extra); + } + + # PH - watch for PID added after the timestamp. + if (defined($6)) { + $extra += length($6); + next if ($length < 38 + $extra); + } - # PH - my($extra) = defined($4)? 
6 : 0; $id = substr($_, 20 + $extra, 16); $flag = substr($_, 37 + $extra, 2); + + if ($flag !~ /^([<>=*-]+|SA)$/ && /rejected|refused|dropped/) { + $flag = "Re"; + $extra -= 3; + } + + # Rejects can have no MSGID... + if ($flag eq "Re" && $id !~ /^[-0-9a-zA-Z]+$/) { + $id = "reject:" . ++$rej_id; + $extra -= 17; + } '; # Watch for user specified patterns. my $user_pattern_index = 0; foreach (@user_patterns) { $user_pattern_totals[$user_pattern_index] = 0; - $parser .= " \$user_pattern_totals[$user_pattern_index]++ if $_;\n"; + $parser .= " if ($_) {\n"; + $parser .= " \$user_pattern_totals[$user_pattern_index]++;\n"; + $parser .= " \$user_pattern_interval_count[$user_pattern_index][(\$m_hour*60 + \$m_min)/$hist_interval]++;\n" if ($hist_opt > 0); + $parser .= " }\n"; $user_pattern_index++; } $parser .= ' - next unless ($flag =~ /<=|=>|->|==|\\*\\*|Co/); - - #Strip away the timestamp, ID and flag (which could be "Com" for completed) - #This speeds up the later pattern matches. - # $_ = substr($_, 40); + next unless ($flag =~ /<=|=>|->|==|\\*\\*|Co|SA|Re/); + #Strip away the timestamp, ID and flag to speed up later pattern matches. + #The flags include Co (Completed), Re (Rejected), and SA (SpamAssassin). $_ = substr($_, 40 + $extra); # PH + # Alias @message to the array of information about the message. + # This minimises the number of calls to hash functions. + $messages{$id} = [] unless exists $messages{$id}; + *message = $messages{$id}; + + # JN - Skip over certain transports as specified via the "-nt/.../" command # line switch (where ... is a perl style regular expression). This is # required so that transports that skew stats such as SpamAssassin can be @@ -1411,40 +1948,69 @@ sub generate_parser { #ENDIF ($transport_pattern) - $host = "local"; #Host is local unless otherwise specified. - $domain = "localdomain"; #Domain is localdomain unless otherwise specified. - # Do some pattern matches to get the host and IP address. # We expect lines to be of the form "H=[IpAddr]" or "H=Host [IpAddr]" or # "H=Host (UnverifiedHost) [IpAddr]" or "H=(UnverifiedHost) [IpAddr]". # We do 2 separate matches to keep the matches simple and fast. - if (/\\sH=(\\S+)/) { - $host = $1; - - ($ip) = /\\sH=.*?(\\s\\[[^]]+\\])/; - # If there is only an IP address, it will be in $host and $ip will be - # unset. That is OK, because we only use $ip in conjunction with $host - # below. But make it empty to avoid warning messages. - $ip = "" if !defined $ip; - - #IFDEF ($do_sender{Domain}) - if ($host !~ /^\\[/ && $host =~ /^(\\(?)[^\\.]+\\.([^\\.]+\\..*)/) { - # Remove the host portion from the DNS name. We ensure that we end up with - # at least xxx.yyy. $host can be "(x.y.z)" or "x.y.z". - $domain = lc("$1.$2"); - $domain =~ s/^\\.//; #Remove preceding dot. - } - #ENDIF ($do_sender{Domain}) - + # Host is local unless otherwise specified. + # Watch out for "H=([IpAddr])" in case they send "[IpAddr]" as their HELO! + $ip = (/\\bH=(?:|.*? )(\\[[^]]+\\])/) ? $1 + # 2008-03-31 06:25:22 Connection from [213.246.33.217]:39456 refused: too many connections from that IP address // .hs + : (/Connection from (\[\S+\])/) ? $1 + # 2008-03-31 06:52:40 SMTP call from mail.cacoshrf.com (ccsd02.ccsd.local) [69.24.118.229]:4511 dropped: too many nonmail commands (last was "RSET") // .hs + : (/SMTP call from .*?(\[\S+\])/) ? $1 + : "local"; + $host = (/\\bH=(\\S+)/) ? $1 : "local"; + + $domain = "localdomain"; #Domain is localdomain unless otherwise specified. 
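    # Worked illustration (the hostname and address are invented): for a log
    # fragment containing "H=mail.example.com (ehlo.example.com) [192.0.2.1]"
    # the two matches above leave $host = "mail.example.com" and
    # $ip = "[192.0.2.1]"; for a bare "H=[192.0.2.1]" both end up as
    # "[192.0.2.1]"; and a line matching none of these forms leaves both set
    # to "local". The #IFDEF block that follows then derives $domain from
    # $host when sender-domain tables have been requested.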
+ + #IFDEF ($do_sender{Domain}) + if ($host =~ /^\\[/ || $host =~ /^[\\d\\.]+$/) { + # Host is just an IP address. + $domain = $host; + } + elsif ($host =~ /^(\\(?)[^\\.]+\\.([^\\.]+\\..*)/) { + # Remove the host portion from the DNS name. We ensure that we end up + # with at least xxx.yyy. $host can be "(x.y.z)" or "x.y.z". + $domain = lc("$1.$2"); + $domain =~ s/^\\.//; #Remove preceding dot. } + #ENDIF ($do_sender{Domain}) #IFDEF ($do_sender{Email}) - $email = (/^(\S+)/) ? $1 : ""; + #IFDEF ($include_original_destination) + # Catch both "a@b.com " and "e@f.com" + #$email = (/^(\S+) (<(\S*?)>)?/) ? $3 || $1 : ""; + $email = (/^(\S+ (<[^@>]+@?[^>]*>)?)/) ? $1 : ""; + chomp($email); + #ENDIF ($include_original_destination) + + #IFNDEF ($include_original_destination) + $email = (/^(\S+)/) ? $1 : ""; + #ENDIF ($include_original_destination) #ENDIF ($do_sender{Email}) #IFDEF ($do_sender{Edomain}) - $edomain = (/^\S*?\\@(\S+)/) ? lc($1) : ""; + if (/^(<>|blackhole)/) { + $edomain = $1; + } + #IFDEF ($include_original_destination) + elsif (/^(\S+ (<\S*?\\@(\S+?)>)?)/) { + $edomain = $1; + chomp($edomain); + $edomain =~ s/@(\S+?)>/"@" . lc($1) . ">"/e; + } + #ENDIF ($include_original_destination) + #IFNDEF ($include_original_destination) + elsif (/^\S*?\\@(\S+)/) { + $edomain = lc($1); + } + #ENDIF ($include_original_destination) + else { + $edomain = ""; + } + #ENDIF ($do_sender{Edomain}) if ($tod lt $begin) { @@ -1457,78 +2023,79 @@ sub generate_parser { if ($flag eq "<=") { $thissize = (/\\sS=(\\d+)( |$)/) ? $1 : 0; - $size{$id} = $thissize; + $message[$SIZE] = $thissize; + $message[$PROTOCOL] = (/ P=(\S+)/) ? $1 : undef; #IFDEF ($show_relay) if ($host ne "local") { - # Save incoming information in case it becomes interesting - # later, when delivery lines are read. - my($from) = /^(\\S+)/; - $from_host{$id} = "$host$ip"; - $from_address{$id} = $from; + # Save incoming information in case it becomes interesting + # later, when delivery lines are read. + my($from) = /^(\\S+)/; + $message[$FROM_HOST] = "$host$ip"; + $message[$FROM_ADDRESS] = $from; } #ENDIF ($show_relay) #IFDEF ($local_league_table || $include_remote_users) - if (/\sU=(\\S+)/) { - my $user = $1; + if (/\sU=(\\S+)/) { + my $user = $1; - #IFDEF ($local_league_table && $include_remote_users) - { #Store both local and remote users. - #ENDIF ($local_league_table && $include_remote_users) + #IFDEF ($local_league_table && $include_remote_users) + { #Store both local and remote users. + #ENDIF ($local_league_table && $include_remote_users) - #IFDEF ($local_league_table && ! $include_remote_users) - if ($host eq "local") { #Store local users only. - #ENDIF ($local_league_table && ! $include_remote_users) + #IFDEF ($local_league_table && ! $include_remote_users) + if ($host eq "local") { #Store local users only. + #ENDIF ($local_league_table && ! $include_remote_users) - #IFDEF ($include_remote_users && ! $local_league_table) - if ($host ne "local") { #Store remote users only. - #ENDIF ($include_remote_users && ! $local_league_table) + #IFDEF ($include_remote_users && ! $local_league_table) + if ($host ne "local") { #Store remote users only. + #ENDIF ($include_remote_users && ! 
$local_league_table) - $received_count_user{$user}++; - add_volume(\\$received_data_user{$user},\\$received_data_gigs_user{$user},$thissize); + ++$received_count_user{$user}; + add_volume(\\$received_data_user{$user},\\$received_data_gigs_user{$user},$thissize); } - } + } #ENDIF ($local_league_table || $include_remote_users) #IFDEF ($do_sender{Host}) - $received_count{Host}{$host}++; - add_volume(\\$received_data{Host}{$host},\\$received_data_gigs{Host}{$host},$thissize); + ++$received_count{Host}{$host}; + add_volume(\\$received_data{Host}{$host},\\$received_data_gigs{Host}{$host},$thissize); #ENDIF ($do_sender{Host}) #IFDEF ($do_sender{Domain}) if ($domain) { - $received_count{Domain}{$domain}++; - add_volume(\\$received_data{Domain}{$domain},\\$received_data_gigs{Domain}{$domain},$thissize); - } + ++$received_count{Domain}{$domain}; + add_volume(\\$received_data{Domain}{$domain},\\$received_data_gigs{Domain}{$domain},$thissize); + } #ENDIF ($do_sender{Domain}) #IFDEF ($do_sender{Email}) - $received_count{Email}{$email}++; - add_volume(\\$received_data{Email}{$email},\\$received_data_gigs{Email}{$email},$thissize); + ++$received_count{Email}{$email}; + add_volume(\\$received_data{Email}{$email},\\$received_data_gigs{Email}{$email},$thissize); #ENDIF ($do_sender{Email}) #IFDEF ($do_sender{Edomain}) - $received_count{Edomain}{$edomain}++; - add_volume(\\$received_data{Edomain}{$edomain},\\$received_data_gigs{Edomain}{$edomain},$thissize); + ++$received_count{Edomain}{$edomain}; + add_volume(\\$received_data{Edomain}{$edomain},\\$received_data_gigs{Edomain}{$edomain},$thissize); #ENDIF ($do_sender{Edomain}) - $total_received_count++; + ++$total_received_count; add_volume(\\$total_received_data,\\$total_received_data_gigs,$thissize); - #IFDEF ($#queue_times >= 0) - $arrival_time{$id} = $tod; - #ENDIF ($#queue_times >= 0) + #IFDEF ($#queue_times >= 0 || $#rcpt_times >= 0) + $message[$ARRIVAL_TIME] = $tod; + #ENDIF ($#queue_times >= 0 || $#rcpt_times >= 0) #IFDEF ($hist_opt > 0) - $received_interval_count[($m_hour*60 + $m_min)/$hist_interval]++; + $received_interval_count[($m_hour*60 + $m_min)/$hist_interval]++; #ENDIF ($hist_opt > 0) } elsif ($flag eq "=>") { - $size = $size{$id} || 0; + $size = $message[$SIZE] || 0; if ($host ne "local") { - $remote_delivered{$id} = 1; + $message[$REMOTE_DELIVERED] = 1; #IFDEF ($show_relay) @@ -1538,24 +2105,24 @@ sub generate_parser { # addresses, there may be a further address between the first # and last. - if (defined $from_host{$id}) { + if (defined $message[$FROM_HOST]) { if (/^(\\S+)(?:\\s+\\([^)]\\))?\\s+<([^>]+)>/) { ($old,$new) = ($1,$2); - } + } else { - $old = $new = ""; - } + $old = $new = ""; + } if ("\\L$new" eq "\\L$old") { ($old) = /^(\\S+)/ if $old eq ""; - my $key = "H=\\L$from_host{$id}\\E A=\\L$from_address{$id}\\E => " . + my $key = "H=\\L$message[$FROM_HOST]\\E A=\\L$message[$FROM_ADDRESS]\\E => " . "H=\\L$host\\E$ip A=\\L$old\\E"; if (!defined $relay_pattern || $key !~ /$relay_pattern/o) { $relayed{$key} = 0 if !defined $relayed{$key}; - $relayed{$key}++; - } + ++$relayed{$key}; + } else { - $relayed_unshown++ + ++$relayed_unshown; } } } @@ -1564,54 +2131,76 @@ sub generate_parser { } #IFDEF ($local_league_table || $include_remote_users) - #IFDEF ($local_league_table && $include_remote_users) - { #Store both local and remote users. - #ENDIF ($local_league_table && $include_remote_users) - - #IFDEF ($local_league_table && ! $include_remote_users) - if ($host eq "local") { #Store local users only. - #ENDIF ($local_league_table && ! 
$include_remote_users) - - #IFDEF ($include_remote_users && ! $local_league_table) - if ($host ne "local") { #Store remote users only. - #ENDIF ($include_remote_users && ! $local_league_table) - - if (my($user) = split((/\\s]*>)/; - $user = "$user $parent" if defined $parent; - } - $delivered_count_user{$user}++; - add_volume(\\$delivered_data_user{$user},\\$delivered_data_gigs_user{$user},$size); - } - } + #IFDEF ($local_league_table && $include_remote_users) + { #Store both local and remote users. + #ENDIF ($local_league_table && $include_remote_users) + + #IFDEF ($local_league_table && ! $include_remote_users) + if ($host eq "local") { #Store local users only. + #ENDIF ($local_league_table && ! $include_remote_users) + + #IFDEF ($include_remote_users && ! $local_league_table) + if ($host ne "local") { #Store remote users only. + #ENDIF ($include_remote_users && ! $local_league_table) + + if (my($user) = split((/\\s]*>)/; + my($parent) = $_ =~ / (<.+?>) /; #DT 1.54 + if (defined $parent) { + $user = "$user $parent"; + #IFDEF ($do_local_domain) + if ($parent =~ /\\@(.+)>/) { + $local_domain = lc($1); + ++$delivered_messages_local_domain{$local_domain}; + ++$delivered_addresses_local_domain{$local_domain}; + add_volume(\\$delivered_data_local_domain{$local_domain},\\$delivered_data_gigs_local_domain{$local_domain},$size); + } + #ENDIF ($do_local_domain) + } + } + ++$delivered_messages_user{$user}; + ++$delivered_addresses_user{$user}; + add_volume(\\$delivered_data_user{$user},\\$delivered_data_gigs_user{$user},$size); + } + } #ENDIF ($local_league_table || $include_remote_users) #IFDEF ($do_sender{Host}) - $delivered_count{Host}{$host}++; - add_volume(\\$delivered_data{Host}{$host},\\$delivered_data_gigs{Host}{$host},$size); + $delivered_messages{Host}{$host}++; + $delivered_addresses{Host}{$host}++; + add_volume(\\$delivered_data{Host}{$host},\\$delivered_data_gigs{Host}{$host},$size); #ENDIF ($do_sender{Host}) #IFDEF ($do_sender{Domain}) if ($domain) { - $delivered_count{Domain}{$domain}++; - add_volume(\\$delivered_data{Domain}{$domain},\\$delivered_data_gigs{Domain}{$domain},$size); - } + ++$delivered_messages{Domain}{$domain}; + ++$delivered_addresses{Domain}{$domain}; + add_volume(\\$delivered_data{Domain}{$domain},\\$delivered_data_gigs{Domain}{$domain},$size); + } #ENDIF ($do_sender{Domain}) #IFDEF ($do_sender{Email}) - $delivered_count{Email}{$email}++; - add_volume(\\$delivered_data{Email}{$email},\\$delivered_data_gigs{Email}{$email},$size); + ++$delivered_messages{Email}{$email}; + ++$delivered_addresses{Email}{$email}; + add_volume(\\$delivered_data{Email}{$email},\\$delivered_data_gigs{Email}{$email},$size); #ENDIF ($do_sender{Email}) #IFDEF ($do_sender{Edomain}) - $delivered_count{Edomain}{$edomain}++; - add_volume(\\$delivered_data{Edomain}{$edomain},\\$delivered_data_gigs{Edomain}{$edomain},$size); + ++$delivered_messages{Edomain}{$edomain}; + ++$delivered_addresses{Edomain}{$edomain}; + add_volume(\\$delivered_data{Edomain}{$edomain},\\$delivered_data_gigs{Edomain}{$edomain},$size); #ENDIF ($do_sender{Edomain}) - $total_delivered_count++; + ++$total_delivered_messages; + ++$total_delivered_addresses; add_volume(\\$total_delivered_data,\\$total_delivered_data_gigs,$size); #IFDEF ($show_transport) my $transport = (/\\sT=(\\S+)/) ? 
$1 : ":blackhole:"; - $transported_count{$transport}++; + ++$transported_count{$transport}; add_volume(\\$transported_data{$transport},\\$transported_data_gigs{$transport},$size); #ENDIF ($show_transport) @@ -1619,18 +2208,89 @@ sub generate_parser { $delivered_interval_count[($m_hour*60 + $m_min)/$hist_interval]++; #ENDIF ($hist_opt > 0) + #IFDEF ($#delivery_times > 0) + if (/ DT=(\S+)/) { + $seconds = wdhms_seconds($1); + for ($i = 0; $i <= $#delivery_times; $i++) { + if ($seconds < $delivery_times[$i]) { + ++$dt_all_bin[$i]; + ++$dt_remote_bin[$i] if $message[$REMOTE_DELIVERED]; + last; + } + } + if ($i > $#delivery_times) { + ++$dt_all_overflow; + ++$dt_remote_overflow if $message[$REMOTE_DELIVERED]; + } + } + #ENDIF ($#delivery_times > 0) + + } + + elsif ($flag eq "->") { + + #IFDEF ($local_league_table || $include_remote_users) + #IFDEF ($local_league_table && $include_remote_users) + { #Store both local and remote users. + #ENDIF ($local_league_table && $include_remote_users) + + #IFDEF ($local_league_table && ! $include_remote_users) + if ($host eq "local") { #Store local users only. + #ENDIF ($local_league_table && ! $include_remote_users) + + #IFDEF ($include_remote_users && ! $local_league_table) + if ($host ne "local") { #Store remote users only. + #ENDIF ($include_remote_users && ! $local_league_table) + + if (my($user) = split((/\\s]*>)/; + my($parent) = $_ =~ / (<.+?>) /; #DT 1.54 + $user = "$user $parent" if defined $parent; + } + ++$delivered_addresses_user{$user}; + } + } + #ENDIF ($local_league_table || $include_remote_users) + + #IFDEF ($do_sender{Host}) + $delivered_addresses{Host}{$host}++; + #ENDIF ($do_sender{Host}) + #IFDEF ($do_sender{Domain}) + if ($domain) { + ++$delivered_addresses{Domain}{$domain}; + } + #ENDIF ($do_sender{Domain}) + #IFDEF ($do_sender{Email}) + ++$delivered_addresses{Email}{$email}; + #ENDIF ($do_sender{Email}) + #IFDEF ($do_sender{Edomain}) + ++$delivered_addresses{Edomain}{$edomain}; + #ENDIF ($do_sender{Edomain}) + + ++$total_delivered_addresses; } - elsif ($flag eq "==" && defined($size{$id}) && !defined($delayed{$id})) { - $delayed_count++; - $delayed{$id} = 1; + elsif ($flag eq "==" && defined($message[$SIZE]) && !defined($message[$DELAYED])) { + ++$delayed_count; + $message[$DELAYED] = 1; } elsif ($flag eq "**") { - $had_error{$id} = 1 if defined ($size{$id}); + if (defined ($message[$SIZE])) { + unless (defined $message[$HAD_ERROR]) { + ++$message_errors; + $message[$HAD_ERROR] = 1; + } + } #IFDEF ($show_errors) - $errors_count{$_}++; + ++$errors_count{$_}; #ENDIF ($show_errors) } @@ -1638,32 +2298,177 @@ sub generate_parser { elsif ($flag eq "Co") { #Completed? #IFDEF ($#queue_times >= 0) - #Note: id_seconds() benchmarks as 42% slower than seconds() and computing - #the time accounts for a significant portion of the run time. 
- my($queued); - if (defined $arrival_time{$id}) { - $queued = seconds($tod) - seconds($arrival_time{$id}); - delete($arrival_time{$id}); - } - else { - $queued = seconds($tod) - id_seconds($id); - } + $queued = queue_time($tod, $message[$ARRIVAL_TIME], $id); for ($i = 0; $i <= $#queue_times; $i++) { if ($queued < $queue_times[$i]) { - $queue_bin[$i]++; - $remote_queue_bin[$i]++ if $remote_delivered{$id}; + ++$qt_all_bin[$i]; + ++$qt_remote_bin[$i] if $message[$REMOTE_DELIVERED]; last; - } - } - $queue_more_than++ if $i > $#queue_times; + } + } + if ($i > $#queue_times) { + ++$qt_all_overflow; + ++$qt_remote_overflow if $message[$REMOTE_DELIVERED]; + } #ENDIF ($#queue_times >= 0) - #IFDEF ($show_relay) - delete($from_host{$id}); - delete($from_address{$id}); - #ENDIF ($show_relay) + #IFDEF ($#rcpt_times >= 0) + if (/ QT=(\S+)/) { + $seconds = wdhms_seconds($1); + #Calculate $queued if not previously calculated above. + #IFNDEF ($#queue_times >= 0) + $queued = queue_time($tod, $message[$ARRIVAL_TIME], $id); + #ENDIF ($#queue_times >= 0) + $rcpt_time = $seconds - $queued; + my($protocol); + + if (defined $message[$PROTOCOL]) { + $protocol = $message[$PROTOCOL]; + + # Create the bin if its not already defined. + unless (exists $rcpt_times_bin{$protocol}) { + initialise_rcpt_times($protocol); + } + } + + for ($i = 0; $i <= $#rcpt_times; ++$i) { + if ($rcpt_time < $rcpt_times[$i]) { + ++$rcpt_times_bin{all}[$i]; + ++$rcpt_times_bin{$protocol}[$i] if defined $protocol; + last; + } + } + + if ($i > $#rcpt_times) { + ++$rcpt_times_overflow{all}; + ++$rcpt_times_overflow{$protocol} if defined $protocol; + } + } + #ENDIF ($#rcpt_times >= 0) + + delete($messages{$id}); + } + elsif ($flag eq "SA") { + $ip = (/From.*?(\\[[^]]+\\])/ || /\\((local)\\)/) ? $1 : ""; + #SpamAssassin message + if (/Action: ((permanently|temporarily) rejected message|flagged as Spam but accepted): score=(\d+\.\d)/) { + #add_volume(\\$spam_score,\\$spam_score_gigs,$3); + ++$spam_count_by_ip{$ip}; + } elsif (/Action: scanned but message isn\'t spam: score=(-?\d+\.\d)/) { + #add_volume(\\$ham_score,\\$ham_score_gigs,$1); + ++$ham_count_by_ip{$ip}; + } elsif (/(Not running SA because SAEximRunCond expanded to false|check skipped due to message size)/) { + ++$ham_count_by_ip{$ip}; + } + } + + # Look for Reject messages or blackholed messages (deliveries + # without a transport) + if ($flag eq "Re" || ($flag eq "=>" && ! 
/\\sT=\\S+/)) { + # Correct the IP address for rejects: + # rejected EHLO from my.test.net [10.0.0.5]: syntactically invalid argument(s): + # rejected EHLO from [10.0.0.6]: syntactically invalid argument(s): + $ip = $1 if ($ip eq "local" && /^rejected [HE][HE]LO from .*?(\[.+?\]):/); + if (/SpamAssassin/) { + ++$rejected_count_by_reason{"Rejected by SpamAssassin"}; + ++$rejected_count_by_ip{$ip}; + } + elsif ( + /(temporarily rejected [A-Z]*) .*?(: .*?)(:|\s*$)/ + ) { + ++$temporarily_rejected_count_by_reason{"\u$1$2"}; + ++$temporarily_rejected_count_by_ip{$ip}; + } + elsif ( + /(temporarily refused connection)/ + ) { + ++$temporarily_rejected_count_by_reason{"\u$1"}; + ++$temporarily_rejected_count_by_ip{$ip}; + } + elsif ( + /(listed at [^ ]+)/ || + /(Forged IP detected in HELO)/ || + /(Invalid domain or IP given in HELO\/EHLO)/ || + /(unqualified recipient rejected)/ || + /(closed connection (after|in response) .*?)\s*$/ || + /(sender rejected)/ || + # 2005-09-23 15:07:49 1EInHJ-0007Ex-Au H=(a.b.c) [10.0.0.1] F=<> rejected after DATA: This message contains a virus: (Eicar-Test-Signature) please scan your system. + # 2005-10-06 10:50:07 1ENRS3-0000Nr-Kt => blackhole (DATA ACL discarded recipients): This message contains a virus: (Worm.SomeFool.P) please scan your system. + / rejected after DATA: (.*)/ || + / (rejected DATA: .*)/ || + /.DATA ACL discarded recipients.: (.*)/ || + /rejected after DATA: (unqualified address not permitted)/ || + /(VRFY rejected)/ || +# /(sender verify (defer|fail))/i || + /(too many recipients)/ || + /(refused relay.*?) to/ || + /(rejected by non-SMTP ACL: .*)/ || + /(rejected by local_scan.*)/ || + # SMTP call from %s dropped: too many syntax or protocol errors (last command was "%s" + # SMTP call from %s dropped: too many nonmail commands + /(dropped: too many ((nonmail|unrecognized) commands|syntax or protocol errors))/ || + + # local_scan() function crashed with signal %d - message temporarily rejected + # local_scan() function timed out - message temporarily rejected + /(local_scan.. function .* - message temporarily rejected)/ || + # SMTP protocol synchronization error (input sent without waiting for greeting): rejected connection from %s + /(SMTP protocol .*?(error|violation))/ || + /(message too big)/ + ) { + ++$rejected_count_by_reason{"\u$1"}; + ++$rejected_count_by_ip{$ip}; + } + elsif (/rejected [HE][HE]LO from [^:]*: syntactically invalid argument/) { + ++$rejected_count_by_reason{"Rejected HELO/EHLO: syntactically invalid argument"}; + ++$rejected_count_by_ip{$ip}; + } + elsif (/response to "RCPT TO.*? was: (.*)/) { + ++$rejected_count_by_reason{"Response to RCPT TO was: $1"}; + ++$rejected_count_by_ip{$ip}; + } + elsif ( + /(lookup of host )\S+ (failed)/ || + + # rejected from <%s>%s%s%s%s: message too big: + /(rejected [A-Z]*) .*?(: .*?)(:|\s*$)/ || + # refused connection from %s (host_reject_connection) + # refused connection from %s (tcp wrappers) + /(refused connection )from.*? (\(.*)/ || + + # error from remote mailer after RCPT TO:: host a.b.c [10.0.0.1]: 450 : Recipient address rejected: Greylisted for 60 seconds + # error from remote mailer after MAIL FROM:<> SIZE=3468: host a.b.c [10.0.0.1]: 421 a.b.c has refused your connection because your server did not have a PTR record. + /(error from remote mailer after .*?:).*(: .*?)(:|\s*$)/ || + + # a.b.c F= rejected after DATA: "@" or "." expected after "Undisclosed-Recipient": failing address in "To" header is: + /rejected after DATA: ("." or "." expected).*?(: failing address in .*? 
header)/ || + + # connection from %s refused load average = %.2f + /(Connection )from.*? (refused: load average)/ || + # connection from %s refused (IP options) + # Connection from %s refused: too many connections + # connection from %s refused + /([Cc]onnection )from.*? (refused.*)/ || + # [10.0.0.1]: connection refused + /: (Connection refused)()/ + ) { + ++$rejected_count_by_reason{"\u$1$2"}; + ++$rejected_count_by_ip{$ip}; + } + elsif ( + # 2008-03-31 06:25:22 H=mail.densitron.com [216.70.140.224]:45386 temporarily rejected connection in "connect" ACL: too fast reconnects // .hs + # 2008-03-31 06:25:22 H=mail.densitron.com [216.70.140.224]:45386 temporarily rejected connection in "connect" ACL // .hs + /(temporarily rejected connection in .*?ACL:?.*)/ + ) { + ++$temporarily_rejected_count_by_ip{$ip}; + ++$temporarily_rejected_count_by_reason{"\u$1"}; + } + else { + ++$rejected_count_by_reason{Unknown}; + ++$rejected_count_by_ip{$ip}; + print STDERR "Unknown rejection: $_" if $debug; + } } }'; @@ -1672,36 +2477,41 @@ sub generate_parser { my(%defines_in_operation,$removing_lines,$processed_parser); foreach (split (/\n/,$parser)) { if ((/^\s*#\s*IFDEF\s*\((.*?)\)/i && ! eval $1) || - (/^\s*#\s*IFNDEF\s*\((.*?)\)/i && eval $1) ) { + (/^\s*#\s*IFNDEF\s*\((.*?)\)/i && eval $1) ) { $defines_in_operation{$1} = 1; $removing_lines = 1; } + # Convert constants. + while (/(\$[A-Z][A-Z_]*)\b/) { + my $constant = eval $1; + s/(\$[A-Z][A-Z_]*)\b/$constant/; + } + $processed_parser .= $_."\n" unless $removing_lines; if (/^\s*#\s*ENDIF\s*\((.*?)\)/i) { delete $defines_in_operation{$1}; unless (keys %defines_in_operation) { - $removing_lines = 0; + $removing_lines = 0; } } } - print STDERR "# START OF PARSER:\n$processed_parser\n# END OF PARSER\n\n" if $debug; + print STDERR "# START OF PARSER:$processed_parser\n# END OF PARSER\n\n" if $debug; return $processed_parser; } -=head2 parse(); - - parse($parser,\*FILEHANDLE); - -This subroutine accepts a parser and a filehandle from main and parses each -line. We store the results into global variables. - -=cut - +####################################################################### +# parse(); +# +# parse($parser,\*FILEHANDLE); +# +# This subroutine accepts a parser and a filehandle from main and parses each +# line. We store the results into global variables. +####################################################################### sub parse { my($parser,$fh) = @_; @@ -1717,77 +2527,103 @@ sub parse { -=head2 print_header(); - - print_header(); - -Print our headers and contents. - -=cut - +####################################################################### +# print_header(); +# +# print_header(); +# +# Print our headers and contents. +####################################################################### sub print_header { + my $title = "Exim statistics from $begin to $end"; - if ($html) { - print html_header($title); - print "\n
\n"; } - else { - print "\n$title\n"; + if ($xls_fh) + { + $ws_global->write($row++, $col+0, "Exim Statistics", $f_header1); + &set_worksheet_line($ws_global, $row, $col, ["from:", $begin, "to:", $end], $f_default); + $row+=2; } } -=head2 print_grandtotals(); - - print_grandtotals(); - -Print the grand totals. - -=cut - +####################################################################### +# print_grandtotals(); +# +# print_grandtotals(); +# +# Print the grand totals. +####################################################################### sub print_grandtotals { # Get the sender by headings and results. This is complicated as we can have # different numbers of columns. - my($sender_txt_header,$sender_html_header,$sender_txt_format,$sender_html_format); + my($sender_txt_header,$sender_txt_format,$sender_html_format); my(@received_totals,@delivered_totals); + my($row_tablehead, $row_max); + my(@col_headers) = ('TOTAL', 'Volume', 'Messages', 'Addresses'); + foreach ('Host','Domain','Email','Edomain') { next unless $do_sender{$_}; if ($merge_reports) { @@ -1798,37 +2634,40 @@ sub print_grandtotals { push(@received_totals,scalar(keys %{$received_data{$_}})); push(@delivered_totals,scalar(keys %{$delivered_data{$_}})); } - $sender_html_header .= "
${_}s%d%s
-$sender_html_header -EoText + my $txt_format1 = " %-16s %9s %6d %6s $sender_txt_format"; + my $txt_format2 = " %6d %4.1f%% %6d %4.1f%%", + my $htm_format1 = "$sender_html_format"; + my $htm_format2 = ""; - $format1 = "$sender_html_format"; - $format2 = ""; - } - else { + if ($txt_fh) { my $sender_spaces = " " x length($sender_txt_header); - print << "EoText"; - -Grand total summary -------------------- - $sender_spaces At least one address - TOTAL Volume Messages $sender_txt_header Delayed Failed -EoText - $format1 = " %-16s %9s %6d $sender_txt_format"; - $format2 = " %6d %4.1f%% %6d %4.1f%%", + print $txt_fh "\n"; + print $txt_fh "Grand total summary\n"; + print $txt_fh "-------------------\n"; + print $txt_fh " $sender_spaces At least one address\n"; + print $txt_fh " TOTAL Volume Messages Addresses $sender_txt_header Delayed Failed\n"; + } + if ($htm_fh) { + print $htm_fh "\n"; + print $htm_fh "

Grand total summary

\n"; + print $htm_fh "
TOTALVolumeMessagesAt least one addr
Delayed
At least one addr
Failed
%s%s%s%s%d%4.1f%%%d%4.1f%%
%s%s%d%d%4.1f%%%d%4.1f%%
\n"; + print $htm_fh "\n"; + } + if ($xls_fh) { + $ws_global->write($row++, 0, "Grand total summary", $f_header2); + $ws_global->write($row, 0, \@col_headers, $f_header2); + $ws_global->merge_range($row, scalar(@col_headers), $row, scalar(@col_headers)+1, "At least one addr Delayed", $f_header2_m); + $ws_global->merge_range($row, scalar(@col_headers)+2, $row, scalar(@col_headers)+3, "At least one addr Failed", $f_header2_m); + #$ws_global->write(++$row, scalar(@col_headers), ['Total','Percent','Total','Percent'], $f_header2); } + my($volume,$failed_count); if ($merge_reports) { $volume = volume_rounded($report_totals{Received}{Volume}, $report_totals{Received}{'Volume-gigs'}); @@ -1838,107 +2677,278 @@ EoText } else { $volume = volume_rounded($total_received_data, $total_received_data_gigs); - $failed_count = keys %had_error; + $failed_count = $message_errors; } { no integer; - printf("$format1$format2\n",'Received',$volume,$total_received_count, - @received_totals,$delayed_count, - ($total_received_count) ? ($delayed_count*100/$total_received_count) : 0, - $failed_count, - ($total_received_count) ? ($failed_count*100/$total_received_count) : 0); + + my @content=( + $volume,$total_received_count,'', + @received_totals, + $delayed_count, + ($total_received_count) ? ($delayed_count*100/$total_received_count) : 0, + $failed_count, + ($total_received_count) ? ($failed_count*100/$total_received_count) : 0 + ); + + printf $txt_fh ("$txt_format1$txt_format2\n", 'Received', @content) if $txt_fh; + printf $htm_fh ("$htm_format1$htm_format2\n", 'Received', @content) if $htm_fh; + if ($xls_fh) { + $ws_global->write(++$row, 0, 'Received', $f_default); + for (my $i=0; $i < scalar(@content); $i++) { + if ($i == 4 || $i == 6) { + $ws_global->write($row, $i+1, $content[$i]/100, $f_percent); + } + else { + $ws_global->write($row, $i+1, $content[$i], $f_default); + } + } + } } if ($merge_reports) { $volume = volume_rounded($report_totals{Delivered}{Volume}, $report_totals{Delivered}{'Volume-gigs'}); - $total_delivered_count = get_report_total($report_totals{Delivered},'Messages'); + $total_delivered_messages = get_report_total($report_totals{Delivered},'Messages'); + $total_delivered_addresses = get_report_total($report_totals{Delivered},'Addresses'); } else { $volume = volume_rounded($total_delivered_data, $total_delivered_data_gigs); } - printf("$format1\n\n",'Delivered',$volume,$total_delivered_count,@delivered_totals); - print "
" . join('',@col_headers) . "At least one addr
Delayed
At least one addr
Failed
\n" if $html; -} + my @content=($volume, $total_delivered_messages, $total_delivered_addresses, @delivered_totals); + printf $txt_fh ("$txt_format1\n", 'Delivered', @content) if $txt_fh; + printf $htm_fh ("$htm_format1\n", 'Delivered', @content) if $htm_fh; -=head2 print_user_patterns() + if ($xls_fh) { + $ws_global->write(++$row, 0, 'Delivered', $f_default); + for (my $i=0; $i < scalar(@content); $i++) { + $ws_global->write($row, $i+1, $content[$i], $f_default); + } + } - print_user_patterns(); + if ($merge_reports) { + foreach ('Rejects', 'Temp Rejects', 'Ham', 'Spam') { + my $messages = get_report_total($report_totals{$_},'Messages'); + my $addresses = get_report_total($report_totals{$_},'Addresses'); + if ($messages) { + @content = ($_, '', $messages, ''); + push(@content,get_report_total($report_totals{$_},'Hosts')) if $do_sender{Host}; + #These rows do not have entries for the following columns (if specified) + foreach ('Domain','Email','Edomain') { + push(@content,'') if $do_sender{$_}; + } -Print the counts of user specified patterns. + printf $txt_fh ("$txt_format1\n", @content) if $txt_fh; + printf $htm_fh ("$htm_format1\n", @content) if $htm_fh; + $ws_global->write(++$row, 0, \@content) if $xls_fh; + } + } + } + else { + foreach my $total_aref (['Rejects',\%rejected_count_by_ip], + ['Temp Rejects',\%temporarily_rejected_count_by_ip], + ['Ham',\%ham_count_by_ip], + ['Spam',\%spam_count_by_ip]) { + #Count the number of messages of this type. + my $messages = 0; + map {$messages += $_} values %{$total_aref->[1]}; + + if ($messages > 0) { + @content = ($total_aref->[0], '', $messages, ''); + + #Count the number of distinct IPs for the Hosts column. + push(@content,scalar(keys %{$total_aref->[1]})) if $do_sender{Host}; + + #These rows do not have entries for the following columns (if specified) + foreach ('Domain','Email','Edomain') { + push(@content,'') if $do_sender{$_}; + } -=cut + printf $txt_fh ("$txt_format1\n", @content) if $txt_fh; + printf $htm_fh ("$htm_format1\n", @content) if $htm_fh; + $ws_global->write(++$row, 0, \@content) if $xls_fh; + } + } + } + printf $txt_fh "\n" if $txt_fh; + printf $htm_fh "\n" if $htm_fh; + ++$row; +} + + +####################################################################### +# print_user_patterns() +# +# print_user_patterns(); +# +# Print the counts of user specified patterns. +####################################################################### sub print_user_patterns { - my($format1); + my $txt_format1 = " %-18s %6d"; + my $htm_format1 = "%s%d"; - if ($html) { - print "

User Specified Patterns

\n"; - print "\n"; - print ""; + + if ($txt_fh) { + print $txt_fh "Rejected mail by reason\n"; + print $txt_fh "-----------------------"; + print $txt_fh "\n Total\n"; + } + if ($htm_fh) { + print $htm_fh "

Rejected mail by reason

\n"; + print $htm_fh "
\n"; - print "\n"; - print "\n"; - $format1 = ""; + if ($txt_fh) { + print $txt_fh "User Specified Patterns\n"; + print $txt_fh "-----------------------"; + print $txt_fh "\n Total\n"; } - else { - print "User Specified Patterns\n"; - print "-----------------------"; - print "\n Total\n"; - $format1 = " %-18s %6d"; + if ($htm_fh) { + print $htm_fh "

User Specified Patterns

\n"; + print $htm_fh "
 Total
%s%d
\n"; + print $htm_fh "
\n"; + print $htm_fh "\n"; + print $htm_fh "\n"; + } + if ($xls_fh) { + $ws_global->write($row++, $col, "User Specified Patterns", $f_header2); + &set_worksheet_line($ws_global, $row++, 1, ["Total"], $f_headertab); } + my($key); if ($merge_reports) { # We are getting our data from previous reports. foreach $key (@user_descriptions) { my $count = get_report_total($report_totals{patterns}{$key},'Total'); - printf("$format1\n",$key,$count); + printf $txt_fh ("$txt_format1\n",$key,$count) if $txt_fh; + printf $htm_fh ("$htm_format1\n",$key,$count) if $htm_fh; + if ($xls_fh) + { + &set_worksheet_line($ws_global, $row++, 0, [$key,$count], $f_default); + } } } else { # We are getting our data from mainlog files. my $user_pattern_index = 0; foreach $key (@user_descriptions) { - printf("$format1\n",$key,$user_pattern_totals[$user_pattern_index]); + printf $txt_fh ("$txt_format1\n",$key,$user_pattern_totals[$user_pattern_index]) if $txt_fh; + printf $htm_fh ("$htm_format1\n",$key,$user_pattern_totals[$user_pattern_index]) if $htm_fh; + $ws_global->write($row++, 0, [$key,$user_pattern_totals[$user_pattern_index]]) if $xls_fh; $user_pattern_index++; } } - if ($html) { - print "
 Total
\n"; + print $txt_fh "\n" if $txt_fh; + print $htm_fh "
\n\n" if $htm_fh; + if ($xls_fh) + { + ++$row; + } + + if ($hist_opt > 0) { + my $user_pattern_index = 0; + foreach $key (@user_descriptions) { + print_histogram($key, 'occurence', @{$user_pattern_interval_count[$user_pattern_index]}); + $user_pattern_index++; + } } - print "\n"; } +####################################################################### +# print_rejects() +# +# print_rejects(); +# +# Print statistics about rejected mail. +####################################################################### +sub print_rejects { + my($format1,$reason); + + my $txt_format1 = " %-40s %6d"; + my $htm_format1 = "
%s%d
\n"; + print $htm_fh "\n"; + } + if ($xls_fh) { + $ws_global->write($row++, $col, "Rejected mail by reason", $f_header2); + &set_worksheet_line($ws_global, $row++, 1, ["Total"], $f_headertab); + } + -=head2 print_transport(); + my $href = ($merge_reports) ? $report_totals{rejected_mail_by_reason} : \%rejected_count_by_reason; + my(@chartdatanames, @chartdatavals_count); - print_transport(); + foreach $reason (top_n_sort($topcount, $href, undef, undef)) { + printf $txt_fh ("$txt_format1\n",$reason,$href->{$reason}) if $txt_fh; + printf $htm_fh ("$htm_format1\n",$reason,$href->{$reason}) if $htm_fh; + set_worksheet_line($ws_global, $row++, 0, [$reason,$href->{$reason}], $f_default) if $xls_fh; + push(@chartdatanames, $reason); + push(@chartdatavals_count, $href->{$reason}); + } -Print totals by transport. + $row++ if $xls_fh; + print $txt_fh "\n" if $txt_fh; + + if ($htm_fh) { + print $htm_fh "
 Total
"; + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals_count > 0)) { + # calculate the graph + my @data = ( + \@chartdatanames, + \@chartdatavals_count + ); + my $graph = GD::Graph::pie->new(200, 200); + $graph->set( + x_label => 'Rejection Reasons', + y_label => 'Messages', + title => 'By count', + ); + my $gd = $graph->plot(\@data) or warn($graph->error); + if ($gd) { + open(IMG, ">$chartdir/rejections_count.png") or die "Could not write $chartdir/rejections_count.png: $!\n"; + binmode IMG; + print IMG $gd->png; + close IMG; + print $htm_fh ""; + } + } + print $htm_fh "
\n\n"; + } +} -=cut + + + +####################################################################### +# print_transport(); +# +# print_transport(); +# +# Print totals by transport. +####################################################################### sub print_transport { - my($format1); my(@chartdatanames); my(@chartdatavals_count); my(@chartdatavals_vol); - no integer; #Lose this for charting the data. + no integer; #Lose this for charting the data. - if ($html) { - print "

Deliveries by Transport

\n"; - print "\n"; - print "
\n"; - print "\n"; - print "\n"; - $format1 = ""; + my $txt_format1 = " %-18s %6s %6d"; + my $htm_format1 = ""; + + if ($txt_fh) { + print $txt_fh "Deliveries by transport\n"; + print $txt_fh "-----------------------"; + print $txt_fh "\n Volume Messages\n"; } - else { - print "Deliveries by transport\n"; - print "-----------------------"; - print "\n Volume Messages\n"; - $format1 = " %-18s %6s %6d"; + if ($htm_fh) { + print $htm_fh "

Deliveries by Transport

\n"; + print $htm_fh "
 VolumeMessages
%s%s%d
%s%s%d
\n"; + print $htm_fh "\n"; + } + if ($xls_fh) { + $ws_global->write(++$row, $col, "Deliveries by transport", $f_header2); + $ws_global->write(++$row, 1, ["Volume", "Messages"], $f_headertab); } my($key); @@ -1946,29 +2956,34 @@ sub print_transport { # We are getting our data from previous reports. foreach $key (sort keys %{$report_totals{transport}}) { my $count = get_report_total($report_totals{transport}{$key},'Messages'); - printf("$format1\n",$key, - volume_rounded($report_totals{transport}{$key}{Volume},$report_totals{transport}{$key}{'Volume-gigs'}), - $count); + my @content=($key, volume_rounded($report_totals{transport}{$key}{Volume}, + $report_totals{transport}{$key}{'Volume-gigs'}), $count); push(@chartdatanames, $key); push(@chartdatavals_count, $count); push(@chartdatavals_vol, $report_totals{transport}{$key}{'Volume-gigs'}*$gig + $report_totals{transport}{$key}{Volume} ); + printf $txt_fh ("$txt_format1\n", @content) if $txt_fh; + printf $htm_fh ("$htm_format1\n", @content) if $htm_fh; + $ws_global->write(++$row, 0, \@content) if $xls_fh; } } else { # We are getting our data from mainlog files. foreach $key (sort keys %transported_data) { - printf("$format1\n",$key, - volume_rounded($transported_data{$key},$transported_data_gigs{$key}), - $transported_count{$key}); + my @content=($key, volume_rounded($transported_data{$key},$transported_data_gigs{$key}), + $transported_count{$key}); push(@chartdatanames, $key); push(@chartdatavals_count, $transported_count{$key}); push(@chartdatavals_vol, $transported_data_gigs{$key}*$gig + $transported_data{$key}); + printf $txt_fh ("$txt_format1\n", @content) if $txt_fh; + printf $htm_fh ("$htm_format1\n", @content) if $htm_fh; + $ws_global->write(++$row, 0, \@content) if $xls_fh; } } - if ($html) { - print "
 VolumeMessages
\n"; - print "
\n"; - if ($HAVE_GD_Graph_pie && $charts) + print $txt_fh "\n" if $txt_fh; + if ($htm_fh) { + print $htm_fh "
"; + + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals_count > 0)) { # calculate the graph my @data = ( @@ -1983,16 +2998,16 @@ sub print_transport { ); my $gd = $graph->plot(\@data) or warn($graph->error); if ($gd) { - open(IMG, ">$chartdir/transports_count.png") or die $!; - binmode IMG; - print IMG $gd->png; - close IMG; - print ""; + open(IMG, ">$chartdir/transports_count.png") or die "Could not write $chartdir/transports_count.png: $!\n"; + binmode IMG; + print IMG $gd->png; + close IMG; + print $htm_fh ""; } } - print "\n"; + print $htm_fh ""; - if ($HAVE_GD_Graph_pie && $charts) { + if ($HAVE_GD_Graph_pie && $charts && ($#chartdatavals_vol > 0)) { my @data = ( \@chartdatanames, \@chartdatavals_vol @@ -2003,192 +3018,263 @@ sub print_transport { ); my $gd = $graph->plot(\@data) or warn($graph->error); if ($gd) { - open(IMG, ">$chartdir/transports_vol.png") or die $!; - binmode IMG; - print IMG $gd->png; - close IMG; - print ""; + open(IMG, ">$chartdir/transports_vol.png") or die "Could not write $chartdir/transports_vol.png: $!\n"; + binmode IMG; + print IMG $gd->png; + close IMG; + print $htm_fh ""; } } - print "
\n"; + + print $htm_fh "\n\n"; } - print "\n"; } -=head2 print_relay(); - - print_relay(); - -Print our totals by relay. - -=cut - +####################################################################### +# print_relay(); +# +# print_relay(); +# +# Print our totals by relay. +####################################################################### sub print_relay { + my $row_print_relay=1; my $temp = "Relayed messages"; - print "

$temp

\n" if $html; + print $htm_fh "

$temp

\n" if $htm_fh; if (scalar(keys %relayed) > 0 || $relayed_unshown > 0) { my $shown = 0; my $spacing = ""; - my($format); + my $txt_format = "%7d %s\n => %s\n"; + my $htm_format = "%d%s%s\n"; - if ($html) { - print "\n"; - print "\n"; - $format = "\n"; + printf $txt_fh ("%s\n%s\n\n", $temp, "-" x length($temp)) if $txt_fh; + if ($htm_fh) { + print $htm_fh "
CountFromTo
%d%s%s
\n"; + print $htm_fh "\n"; } - else { - printf("%s\n%s\n\n", $temp, "-" x length($temp)); - $format = "%7d %s\n => %s\n"; + if ($xls_fh) { + $ws_relayed->write($row_print_relay++, $col, $temp, $f_header2); + &set_worksheet_line($ws_relayed, $row_print_relay++, 0, ["Count", "From", "To"], $f_headertab); } + my($key); foreach $key (sort keys %relayed) { my $count = $relayed{$key}; $shown += $count; $key =~ s/[HA]=//g; my($one,$two) = split(/=> /, $key); - printf($format, $count, $one, $two); + my @content=($count, $one, $two); + printf $txt_fh ($txt_format, @content) if $txt_fh; + printf $htm_fh ($htm_format, @content) if $htm_fh; + if ($xls_fh) + { + &set_worksheet_line($ws_relayed, $row_print_relay++, 0, \@content); + } $spacing = "\n"; } - print "
CountFromTo
\n

\n" if $html; - print "${spacing}Total: $shown (plus $relayed_unshown unshown)\n"; + + print $htm_fh "\n

\n" if $htm_fh; + print $txt_fh "${spacing}Total: $shown (plus $relayed_unshown unshown)\n\n" if $txt_fh; + print $htm_fh "${spacing}Total: $shown (plus $relayed_unshown unshown)\n\n" if $htm_fh; + if ($xls_fh) + { + &set_worksheet_line($ws_relayed, $row_print_relay++, 0, [$shown, "Sum of shown" ]); + &set_worksheet_line($ws_relayed, $row_print_relay++, 0, [$relayed_unshown, "unshown"]); + $row_print_relay++; + } } else { - print "No relayed messages\n"; - print "-------------------\n" unless $html; + print $txt_fh "No relayed messages\n-------------------\n\n" if $txt_fh; + print $htm_fh "No relayed messages\n\n" if $htm_fh; + if ($xls_fh) + { + $row_print_relay++; + } } - print "\n"; } -=head2 print_errors(); - - print_errors(); - -Print our errors. In HTML, we display them as a list rather than a table - -Netscape doesn't like large tables! - -=cut - +####################################################################### +# print_errors(); +# +# print_errors(); +# +# Print our errors. In HTML, we display them as a list rather than a table - +# Netscape doesn't like large tables! +####################################################################### sub print_errors { my $total_errors = 0; + $row=1; if (scalar(keys %errors_count) != 0) { my $temp = "List of errors"; - my($format); - if ($html) { - print "


$temp

\n"; - print "
  • Count - Error\n"; - $format = "
  • %d - %s\n"; + my $htm_format = "
  • %d - %s\n"; + + printf $txt_fh ("%s\n%s\n\n", $temp, "-" x length($temp)) if $txt_fh; + if ($htm_fh) { + print $htm_fh "

    $temp

    \n"; + print $htm_fh "
    • Count - Error\n"; } - else { - printf("%s\n%s\n\n", $temp, "-" x length($temp)); + if ($xls_fh) + { + $ws_errors->write($row++, 0, $temp, $f_header2); + &set_worksheet_line($ws_errors, $row++, 0, ["Count", "Error"], $f_headertab); } + my($key); foreach $key (sort keys %errors_count) { my $text = $key; chomp($text); - $text =~ s/\s\s+/ /g; #Convert multiple spaces to a single space. + $text =~ s/\s\s+/ /g; #Convert multiple spaces to a single space. $total_errors += $errors_count{$key}; - if ($html) { + + if ($txt_fh) { + printf $txt_fh ("%5d ", $errors_count{$key}); + my $text_remaining = $text; + while (length($text_remaining) > 65) { + my($first,$rest) = $text_remaining =~ /(.{50}\S*)\s+(.+)/; + last if !$first; + printf $txt_fh ("%s\n\t ", $first); + $text_remaining = $rest; + } + printf $txt_fh ("%s\n\n", $text_remaining); + } + + if ($htm_fh) { #Translate HTML tag characters. Sergey Sholokh. $text =~ s/\/\>\;/g; - printf($format,$errors_count{$key},$text); + printf $htm_fh ($htm_format,$errors_count{$key},$text); } - else { - printf("%5d ", $errors_count{$key}); - while (length($text) > 65) { - my($first,$rest) = $text =~ /(.{50}\S*)\s+(.+)/; - last if !$first; - printf("%s\n ", $first); - $text = $rest; - } - printf("%s\n\n", $text); + if ($xls_fh) + { + &set_worksheet_line($ws_errors, $row++, 0, [$errors_count{$key},$text]); } } - print "
    \n

    \n" if $html; $temp = "Errors encountered: $total_errors"; - print $temp,"\n"; - print "-" x length($temp),"\n" unless $html; + + if ($txt_fh) { + print $txt_fh $temp, "\n"; + print $txt_fh "-" x length($temp),"\n"; + } + if ($htm_fh) { + print $htm_fh "

\n

\n"; + print $htm_fh $temp, "\n"; + } + if ($xls_fh) + { + &set_worksheet_line($ws_errors, $row++, 0, [$total_errors, "Sum of Errors encountered"]); + } } } -=head2 parse_old_eximstat_reports(); - - parse_old_eximstat_reports($fh); - -Parse old eximstat output so we can merge daily stats to weekly stats and weekly to monthly etc. - -To test that the merging still works after changes, do something like the following. -All the diffs should produce no output. - - options='-bydomain -byemail -byhost -byedomain' - options="$options -pattern 'Completed Messages' /Completed/" - options="$options -pattern 'Received Messages' /<=/" - - ./eximstats $options mainlog > mainlog.txt - ./eximstats $options -merge mainlog.txt > mainlog.2.txt - diff mainlog.txt mainlog.2.txt - - ./eximstats $options -html mainlog > mainlog.html - ./eximstats $options -merge -html mainlog.txt > mainlog.2.html - diff mainlog.html mainlog.2.html - - ./eximstats $options -merge mainlog.html > mainlog.3.txt - diff mainlog.txt mainlog.3.txt - - ./eximstats $options -merge -html mainlog.html > mainlog.3.html - diff mainlog.html mainlog.3.html - - ./eximstats $options -nvr mainlog > mainlog.nvr.txt - ./eximstats $options -merge mainlog.nvr.txt > mainlog.4.txt - diff mainlog.txt mainlog.4.txt - - # double_mainlog.txt should have twice the values that mainlog.txt has. - ./eximstats $options mainlog mainlog > double_mainlog.txt - -=cut - +####################################################################### +# parse_old_eximstat_reports(); +# +# parse_old_eximstat_reports($fh); +# +# Parse old eximstat output so we can merge daily stats to weekly stats and weekly to monthly etc. +# +# To test that the merging still works after changes, do something like the following. +# All the diffs should produce no output. +# +# options='-bydomain -byemail -byhost -byedomain' +# options="$options -show_rt1,2,4 -show_dt 1,2,4" +# options="$options -pattern 'Completed Messages' /Completed/" +# options="$options -pattern 'Received Messages' /<=/" +# +# ./eximstats $options mainlog > mainlog.txt +# ./eximstats $options -merge mainlog.txt > mainlog.2.txt +# diff mainlog.txt mainlog.2.txt +# +# ./eximstats $options -html mainlog > mainlog.html +# ./eximstats $options -merge -html mainlog.txt > mainlog.2.html +# diff mainlog.html mainlog.2.html +# +# ./eximstats $options -merge mainlog.html > mainlog.3.txt +# diff mainlog.txt mainlog.3.txt +# +# ./eximstats $options -merge -html mainlog.html > mainlog.3.html +# diff mainlog.html mainlog.3.html +# +# ./eximstats $options -nvr mainlog > mainlog.nvr.txt +# ./eximstats $options -merge mainlog.nvr.txt > mainlog.4.txt +# diff mainlog.txt mainlog.4.txt +# +# # double_mainlog.txt should have twice the values that mainlog.txt has. +# ./eximstats $options mainlog mainlog > double_mainlog.txt +####################################################################### sub parse_old_eximstat_reports { my($fh) = @_; my(%league_table_value_entered, %league_table_value_was_zero, %table_order); + my(%user_pattern_index); + my $user_pattern_index = 0; + map {$user_pattern_index{$_} = $user_pattern_index++} @user_descriptions; + my $user_pattern_keys = join('|', @user_descriptions); + while (<$fh>) { + PARSE_OLD_REPORT_LINE: if (/Exim statistics from ([\d\-]+ [\d:]+(\s+[\+\-]\d+)?) 
to ([\d\-]+ [\d:]+(\s+[\+\-]\d+)?)/) { $begin = $1 if ($1 lt $begin); $end = $3 if ($3 gt $end); } elsif (/Grand total summary/) { - # Fill in $report_totals{Received|Delivered}{Volume|Messages|Hosts|Domains|...|Delayed|DelayedPercent|Failed|FailedPercent} - my(@fields); + # Fill in $report_totals{Received|Delivered}{Volume|Messages|Addresses|Hosts|Domains|...|Delayed|DelayedPercent|Failed|FailedPercent} + my(@fields, @delivered_fields); + my $doing_table = 0; while (<$fh>) { - $_ = html2txt($_); #Convert general HTML markup to text. - s/At least one addr//g; #Another part of the HTML output we don't want. - -# TOTAL Volume Messages Hosts Domains Delayed Failed -# Received 26MB 237 177 23 8 3.4% 28 11.8% -# Delivered 13MB 233 99 88 - if (/TOTAL\s+(.*?)\s*$/) { - @fields = split(/\s+/,$1); + $_ = html2txt($_); #Convert general HTML markup to text. + s/At least one addr//g; #Another part of the HTML output we don't want. + +# TOTAL Volume Messages Addresses Hosts Domains Delayed Failed +# Received 26MB 237 177 23 8 3.4% 28 11.8% +# Delivered 13MB 233 250 99 88 + if (/TOTAL\s+(.*?)\s*$/) { + $doing_table = 1; + @delivered_fields = split(/\s+/,$1); + #Delayed and Failed have two columns each, so add the extra field names in. - splice(@fields,-1,1,'DelayedPercent','Failed','FailedPercent'); - } - elsif (/(Received|Delivered)\s+(.*?)\s*$/) { - print STDERR "Parsing $_" if $debug; - add_to_totals($report_totals{$1},\@fields,$2); - } - last if (/Delivered/); #Last line of this section. + splice(@delivered_fields,-1,1,'DelayedPercent','Failed','FailedPercent'); + + # Addresses only figure in the Delivered row, so remove them from the + # normal fields. + @fields = grep !/Addresses/, @delivered_fields; + } + elsif (/(Received)\s+(.*?)\s*$/) { + print STDERR "Parsing $_" if $debug; + add_to_totals($report_totals{$1},\@fields,$2); + } + elsif (/(Delivered)\s+(.*?)\s*$/) { + print STDERR "Parsing $_" if $debug; + add_to_totals($report_totals{$1},\@delivered_fields,$2); + my $data = $2; + # If we're merging an old report which doesn't include addresses, + # then use the Messages field instead. + unless (grep(/Addresses/, @delivered_fields)) { + my %tmp; + line_to_hash(\%tmp,\@delivered_fields,$data); + add_to_totals($report_totals{Delivered},['Addresses'],$tmp{Messages}); + } + } + elsif (/(Temp Rejects|Rejects|Ham|Spam)\s+(.*?)\s*$/) { + print STDERR "Parsing $_" if $debug; + add_to_totals($report_totals{$1},['Messages','Hosts'],$2); + } + else { + last if $doing_table; + } } } @@ -2198,18 +3284,24 @@ sub parse_old_eximstat_reports { # Total # Description 85 - while (<$fh>) { last if (/Total/); } #Wait until we get the table headers. + while (<$fh>) { last if (/Total/); } #Wait until we get the table headers. while (<$fh>) { - print STDERR "Parsing $_" if $debug; - $_ = html2txt($_); #Convert general HTML markup to text. - if (/^\s*(.*?)\s+(\d+)\s*$/) { - $report_totals{patterns}{$1} = {} unless (defined $report_totals{patterns}{$1}); - add_to_totals($report_totals{patterns}{$1},['Total'],$2); - } - last if (/^\s*$/); #Finished if we have a blank line. + print STDERR "Parsing $_" if $debug; + $_ = html2txt($_); #Convert general HTML markup to text. + if (/^\s*(.*?)\s+(\d+)\s*$/) { + $report_totals{patterns}{$1} = {} unless (defined $report_totals{patterns}{$1}); + add_to_totals($report_totals{patterns}{$1},['Total'],$2); + } + last if (/^\s*$/); #Finished if we have a blank line. } } + elsif (/(^|

)($user_pattern_keys) per /o) { + # Parse User defined pattern histograms if they exist. + parse_histogram($fh, $user_pattern_interval_count[$user_pattern_index{$2}] ); + } + + elsif (/Deliveries by transport/i) { #Deliveries by transport #----------------------- @@ -2218,45 +3310,26 @@ sub parse_old_eximstat_reports { # address_pipe 655KB 1 # smtp 11MB 151 - while (<$fh>) { last if (/Volume/); } #Wait until we get the table headers. + while (<$fh>) { last if (/Volume/); } #Wait until we get the table headers. while (<$fh>) { - print STDERR "Parsing $_" if $debug; - $_ = html2txt($_); #Convert general HTML markup to text. - if (/(\S+)\s+(\d+\S*\s+\d+)/) { - $report_totals{transport}{$1} = {} unless (defined $report_totals{transport}{$1}); - add_to_totals($report_totals{transport}{$1},['Volume','Messages'],$2); - } - last if (/^\s*$/); #Finished if we have a blank line. + print STDERR "Parsing $_" if $debug; + $_ = html2txt($_); #Convert general HTML markup to text. + if (/(\S+)\s+(\d+\S*\s+\d+)/) { + $report_totals{transport}{$1} = {} unless (defined $report_totals{transport}{$1}); + add_to_totals($report_totals{transport}{$1},['Volume','Messages'],$2); + } + last if (/^\s*$/); #Finished if we have a blank line. } } - elsif (/(Messages received|Deliveries) per/) { -# Messages received per hour (each dot is 2 messages) -#--------------------------------------------------- -# -#00-01 106 ..................................................... -#01-02 103 ................................................... + elsif (/Messages received per/) { + parse_histogram($fh, \@received_interval_count); + } + elsif (/Deliveries per/) { + parse_histogram($fh, \@delivered_interval_count); + } - # Set a pointer to the interval array so we can use the same code - # block for both messages received and delivered. - my $interval_aref = ($1 eq 'Deliveries') ? \@delivered_interval_count : \@received_interval_count; - my $reached_table = 0; - while (<$fh>) { - $reached_table = 1 if (/^00/); - next unless $reached_table; - print STDERR "Parsing $_" if $debug; - if (/^(\d+):(\d+)\s+(\d+)/) { #hh:mm start time format ? - $$interval_aref[($1*60 + $2)/$hist_interval] += $3; - } - elsif (/^(\d+)-(\d+)\s+(\d+)/) { #hh-hh start-end time format ? - $$interval_aref[($1*60)/$hist_interval] += $3; - } - else { #Finished the table ? - last; - } - } - } - - elsif (/Time spent on the queue: (all messages|messages with at least one remote delivery)/) { + #elsif (/Time spent on the queue: (all messages|messages with at least one remote delivery)/) { + elsif (/(Time spent on the queue|Delivery times|Receipt times): ((\S+) messages|messages with at least one remote delivery)((<[^>]*>)*\s*)$/) { #Time spent on the queue: all messages #------------------------------------- # @@ -2268,34 +3341,68 @@ sub parse_old_eximstat_reports { # Set a pointer to the queue bin so we can use the same code # block for both all messages and remote deliveries. - my $bin_aref = ($1 eq 'all messages') ? \@queue_bin : \@remote_queue_bin; - my $reached_table = 0; + #my $bin_aref = ($1 eq 'all messages') ? 
\@qt_all_bin : \@qt_remote_bin; + my($bin_aref, $times_aref, $overflow_sref); + if ($1 eq 'Time spent on the queue') { + $times_aref = \@queue_times; + if ($2 eq 'all messages') { + $bin_aref = \@qt_all_bin; + $overflow_sref = \$qt_all_overflow; + } + else { + $bin_aref = \@qt_remote_bin; + $overflow_sref = \$qt_remote_overflow; + } + } + elsif ($1 eq 'Delivery times') { + $times_aref = \@delivery_times; + if ($2 eq 'all messages') { + $bin_aref = \@dt_all_bin; + $overflow_sref = \$dt_all_overflow; + } + else { + $bin_aref = \@dt_remote_bin; + $overflow_sref = \$dt_remote_overflow; + } + } + else { + unless (exists $rcpt_times_bin{$3}) { + initialise_rcpt_times($3); + } + $bin_aref = $rcpt_times_bin{$3}; + $times_aref = \@rcpt_times; + $overflow_sref = \$rcpt_times_overflow{$3}; + } + + + my ($blank_lines, $reached_table) = (0,0); while (<$fh>) { - $_ = html2txt($_); #Convert general HTML markup to text. - $reached_table = 1 if (/^\s*Under/); - next unless $reached_table; - my $previous_seconds_on_queue = 0; - if (/^\s*(Under|Over|)\s+(\d+[smhdw])\s+(\d+)/) { - print STDERR "Parsing $_" if $debug; - my($modifier,$formated_time,$count) = ($1,$2,$3); - my $seconds = unformat_time($formated_time); - my $time_on_queue = ($seconds + $previous_seconds_on_queue) / 2; - $previous_seconds_on_queue = $seconds; - $time_on_queue = $seconds * 2 if ($modifier eq 'Over'); - my($i); - for ($i = 0; $i <= $#queue_times; $i++) { - if ($time_on_queue < $queue_times[$i]) { - $$bin_aref[$i] += $count; - last; - } - } - # There's only one counter for messages going over the queue - # times so make sure we only count it once. - $queue_more_than += $count if (($bin_aref == \@queue_bin) && ($i > $#queue_times)); - } - else { - last; #Finished the table ? - } + $_ = html2txt($_); #Convert general HTML markup to text. + # The table is preceded by one blank line, and has one blank line + # following it. As the table may be empty, the best way to determine + # that we've finished it is to look for the second blank line. + ++$blank_lines if /^\s*$/; + last if ($blank_lines >=2); #Finished the table ? + $reached_table = 1 if (/\d/); + next unless $reached_table; + my $previous_seconds_on_queue = 0; + if (/^\s*(Under|Over|)\s+(\d+[smhdw])\s+(\d+)/) { + print STDERR "Parsing $_" if $debug; + my($modifier,$formatted_time,$count) = ($1,$2,$3); + my $seconds = unformat_time($formatted_time); + my $time_on_queue = ($seconds + $previous_seconds_on_queue) / 2; + $previous_seconds_on_queue = $seconds; + $time_on_queue = $seconds * 2 if ($modifier eq 'Over'); + my($i); + for ($i = 0; $i <= $#$times_aref; $i++) { + if ($time_on_queue < $times_aref->[$i]) { + $$bin_aref[$i] += $count; + last; + } + } + $$overflow_sref += $count if ($i > $#$times_aref); + + } } } @@ -2311,23 +3418,23 @@ sub parse_old_eximstat_reports { my $reached_table = 0; my($count,$sender); while (<$fh>) { - unless ($reached_table) { - last if (/No relayed messages/); - $reached_table = 1 if (/^\s*\d/ || />\d+(\d+)<.td>(.*?) ?<.td>(.*?)\s+(.*?)\s*$/) { - update_relayed($count,$sender,$1); - } - else { - last; #Finished the table ? - } + unless ($reached_table) { + last if (/No relayed messages/); + $reached_table = 1 if (/^\s*\d/ || />\d+(\d+)<.td>(.*?) ?<.td>(.*?)\s+(.*?)\s*$/) { + update_relayed($count,$sender,$1); + } + else { + last; #Finished the table ? + } } } @@ -2336,6 +3443,9 @@ sub parse_old_eximstat_reports { #------------------------------------- # # 48 1468KB local +# Could also have average values for HTML output. 
+# 48 1468KB 30KB local + my($category,$by_count_or_volume) = ($1,$2); #As we show 2 views of each table (by count and by volume), @@ -2343,97 +3453,149 @@ sub parse_old_eximstat_reports { #Set up a hash to record which entries we have already seen #and one to record which ones we are seeing for the first time. if ($by_count_or_volume =~ /count/) { - undef %league_table_value_entered; - undef %league_table_value_was_zero; - undef %table_order; + undef %league_table_value_entered; + undef %league_table_value_was_zero; + undef %table_order; } #As this section processes multiple different table categories, #set up pointers to the hashes to be updated. - my($count_href,$data_href,$data_gigs_href); + my($messages_href,$addresses_href,$data_href,$data_gigs_href); if ($category =~ /local sender/) { - $count_href = \%received_count_user; - $data_href = \%received_data_user; - $data_gigs_href = \%received_data_gigs_user; + $messages_href = \%received_count_user; + $addresses_href = undef; + $data_href = \%received_data_user; + $data_gigs_href = \%received_data_gigs_user; } elsif ($category =~ /sending (\S+?)s?\b/) { #Top 50 sending (host|domain|email|edomain)s #Top sending (host|domain|email|edomain) - $count_href = \%{$received_count{"\u$1"}}; - $data_href = \%{$received_data{"\u$1"}}; - $data_gigs_href = \%{$received_data_gigs{"\u$1"}}; + $messages_href = \%{$received_count{"\u$1"}}; + $data_href = \%{$received_data{"\u$1"}}; + $data_gigs_href = \%{$received_data_gigs{"\u$1"}}; } elsif ($category =~ /local destination/) { - $count_href = \%delivered_count_user; - $data_href = \%delivered_data_user; - $data_gigs_href = \%delivered_data_gigs_user; + $messages_href = \%delivered_messages_user; + $addresses_href = \%delivered_addresses_user; + $data_href = \%delivered_data_user; + $data_gigs_href = \%delivered_data_gigs_user; + } + elsif ($category =~ /local domain destination/) { + $messages_href = \%delivered_messages_local_domain; + $addresses_href = \%delivered_addresses_local_domain; + $data_href = \%delivered_data_local_domain; + $data_gigs_href = \%delivered_data_gigs_local_domain; } elsif ($category =~ /(\S+) destination/) { #Top 50 (host|domain|email|edomain) destinations #Top (host|domain|email|edomain) destination - $count_href = \%{$delivered_count{"\u$1"}}; - $data_href = \%{$delivered_data{"\u$1"}}; - $data_gigs_href = \%{$delivered_data_gigs{"\u$1"}}; + $messages_href = \%{$delivered_messages{"\u$1"}}; + $addresses_href = \%{$delivered_addresses{"\u$1"}}; + $data_href = \%{$delivered_data{"\u$1"}}; + $data_gigs_href = \%{$delivered_data_gigs{"\u$1"}}; + } + elsif ($category =~ /temporarily rejected ips/) { + $messages_href = \%temporarily_rejected_count_by_ip; + } + elsif ($category =~ /rejected ips/) { + $messages_href = \%rejected_count_by_ip; + } + elsif ($category =~ /non-rejected spamming ips/) { + $messages_href = \%spam_count_by_ip; + } + elsif ($category =~ /mail temporary rejection reasons/) { + $messages_href = \%temporarily_rejected_count_by_reason; + } + elsif ($category =~ /mail rejection reasons/) { + $messages_href = \%rejected_count_by_reason; } my $reached_table = 0; + my $row_re; while (<$fh>) { - $_ = html2txt($_); #Convert general HTML markup to text. - $reached_table = 1 if (/^\s*\d/); - next unless $reached_table; - if (/^\s*(\d+)\s+(\S+)\s*(.*?)\s*$/) { - my($count,$rounded_volume,$entry) = ($1,$2,$3); - #Note: $entry fields can be both null and can contain spaces. - - #Add the entry into the %table_order hash if it has a rounded volume (KB/MB/GB). 
- push(@{$table_order{$rounded_volume}{$by_count_or_volume}},$entry) if ($rounded_volume =~ /\D/); - - unless ($league_table_value_entered{$entry}) { - $league_table_value_entered{$entry} = 1; - unless ($$count_href{$entry}) { - $$count_href{$entry} = 0; - $$data_href{$entry} = 0; - $$data_gigs_href{$entry} = 0; - $league_table_value_was_zero{$entry} = 1; - } - - $$count_href{$entry} += $count; - #Add the rounded value to the data and data_gigs hashes. - un_round($rounded_volume,\$$data_href{$entry},\$$data_gigs_href{$entry}); - print STDERR "$category by $by_count_or_volume: added $count,$rounded_volume to $entry\n" if $debug; - } - } - else { #Finished the table ? - if ($by_count_or_volume =~ /volume/) { - #Add a few bytes to appropriate entries to preserve the order. - - my($rounded_volume); - foreach $rounded_volume (keys %table_order) { - #For each rounded volume, we want to create a list which has things - #ordered from the volume table at the front, and additional things - #from the count table ordered at the back. - @{$table_order{$rounded_volume}{volume}} = () unless defined $table_order{$rounded_volume}{volume}; - @{$table_order{$rounded_volume}{'message count'}} = () unless defined $table_order{$rounded_volume}{'message count'}; - my(@order,%mark); - map {$mark{$_} = 1} @{$table_order{$rounded_volume}{volume}}; - @order = @{$table_order{$rounded_volume}{volume}}; - map {push(@order,$_)} grep(!$mark{$_},@{$table_order{$rounded_volume}{'message count'}}); - - my $bonus_bytes = $#order; - $bonus_bytes = 511 if ($bonus_bytes > 511); #Don't go over the half-K boundary! - while (@order and ($bonus_bytes > 0)) { - my $entry = shift(@order); - if ($league_table_value_was_zero{$entry}) { - $$data_href{$entry} += $bonus_bytes; - print STDERR "$category by $by_count_or_volume: added $bonus_bytes bonus bytes to $entry\n" if $debug; - } - $bonus_bytes--; - } - } - } - - last; - } + # Watch out for empty tables. + goto PARSE_OLD_REPORT_LINE if (/
<\/table>
/ or (/^\s*[a-zA-Z]/ && !/^\s*Messages/)); + + $_ = html2txt($_); #Convert general HTML markup to text. + + # Messages Addresses Bytes Average + if (/^\s*Messages/) { + my $pattern = '^\s*(\d+)'; + $pattern .= (/Addresses/) ? '\s+(\d+)' : '()'; + $pattern .= (/Bytes/) ? '\s+([\dKMGB]+)' : '()'; + $pattern .= (/Average/) ? '\s+[\dKMGB]+' : ''; + $pattern .= '\s+(.*?)\s*$'; + $row_re = qr/$pattern/; + $reached_table = 1; + next; + } + next unless $reached_table; + + my($messages, $addresses, $rounded_volume, $entry); + + if (/$row_re/) { + ($messages, $addresses, $rounded_volume, $entry) = ($1, $2, $3, $4); + } + else { + #Else we have finished the table and we may need to do some + #kludging to retain the order of the entries. + + if ($by_count_or_volume =~ /volume/) { + #Add a few bytes to appropriate entries to preserve the order. + foreach $rounded_volume (keys %table_order) { + #For each rounded volume, we want to create a list which has things + #ordered from the volume table at the front, and additional things + #from the count table ordered at the back. + @{$table_order{$rounded_volume}{volume}} = () unless defined $table_order{$rounded_volume}{volume}; + @{$table_order{$rounded_volume}{'message count'}} = () unless defined $table_order{$rounded_volume}{'message count'}; + my(@order,%mark); + map {$mark{$_} = 1} @{$table_order{$rounded_volume}{volume}}; + @order = @{$table_order{$rounded_volume}{volume}}; + map {push(@order,$_)} grep(!$mark{$_},@{$table_order{$rounded_volume}{'message count'}}); + + my $bonus_bytes = $#order; + $bonus_bytes = 511 if ($bonus_bytes > 511); #Don't go over the half-K boundary! + while (@order and ($bonus_bytes > 0)) { + my $entry = shift(@order); + if ($league_table_value_was_zero{$entry}) { + $$data_href{$entry} += $bonus_bytes; + print STDERR "$category by $by_count_or_volume: added $bonus_bytes bonus bytes to $entry\n" if $debug; + } + $bonus_bytes--; + } + } + } + last; + } + + # Store a new table entry. + + # Add the entry into the %table_order hash if it has a rounded + # volume (KB/MB/GB). + push(@{$table_order{$rounded_volume}{$by_count_or_volume}},$entry) if ($rounded_volume =~ /\D/); + + unless ($league_table_value_entered{$entry}) { + $league_table_value_entered{$entry} = 1; + unless ($$messages_href{$entry}) { + $$messages_href{$entry} = 0; + $$addresses_href{$entry} = 0; + $$data_href{$entry} = 0; + $$data_gigs_href{$entry} = 0; + $league_table_value_was_zero{$entry} = 1; + } + + $$messages_href{$entry} += $messages; + + # When adding the addresses, be aware that we could be merging + # an old report which does not include addresses. In this case, + # we add the messages instead. + $$addresses_href{$entry} += ($addresses) ? $addresses : $messages; + + #Add the rounded value to the data and data_gigs hashes. + un_round($rounded_volume,\$$data_href{$entry},\$$data_gigs_href{$entry}) if $rounded_volume; + print STDERR "$category by $by_count_or_volume: added $messages,$rounded_volume to $entry\n" if $debug; + } + } } elsif (/List of errors/) { @@ -2450,48 +3612,76 @@ sub parse_old_eximstat_reports { my $reached_table = 0; my($count,$error,$blanks); while (<$fh>) { - $reached_table = 1 if (/^( *|
<li>)(\d+)/);
-        next unless $reached_table;
+        $reached_table = 1 if (/^( *|
<li>)(\d+)/);
+        next unless $reached_table;

-        s/^
<li>(\d+) -/$1/;   #Convert an HTML line to a text line.
-        $_ = html2txt($_);      #Convert general HTML markup to text.
+        s/^
  • (\d+) -/$1/; #Convert an HTML line to a text line. + $_ = html2txt($_); #Convert general HTML markup to text. - if (/\t\s*(.*)/) { - $error .= ' ' . $1; #Join a multiline error. - } - elsif (/^\s*(\d+)\s+(.*)/) { - if ($error) { + if (/\t\s*(.*)/) { + $error .= ' ' . $1; #Join a multiline error. + } + elsif (/^\s*(\d+)\s+(.*)/) { + if ($error) { #Finished with a previous multiline error so save it. - $errors_count{$error} = 0 unless $errors_count{$error}; - $errors_count{$error} += $count; - } - ($count,$error) = ($1,$2); - } - elsif (/Errors encountered/) { - if ($error) { + $errors_count{$error} = 0 unless $errors_count{$error}; + $errors_count{$error} += $count; + } + ($count,$error) = ($1,$2); + } + elsif (/Errors encountered/) { + if ($error) { #Finished the section, so save our stored last error. - $errors_count{$error} = 0 unless $errors_count{$error}; - $errors_count{$error} += $count; - } - last; - } + $errors_count{$error} = 0 unless $errors_count{$error}; + $errors_count{$error} += $count; + } + last; + } } } } } +####################################################################### +# parse_histogram($fh, \@delivered_interval_count); +# Parse a histogram into the provided array of counters. +####################################################################### +sub parse_histogram { + my($fh, $counters_aref) = @_; + # Messages received per hour (each dot is 2 messages) + #--------------------------------------------------- + # + #00-01 106 ..................................................... + #01-02 103 ................................................... -=head2 update_relayed(); - - update_relayed($count,$sender,$recipient); - -Adds an entry into the %relayed hash. Currently only used when -merging reports. + my $reached_table = 0; + while (<$fh>) { + $reached_table = 1 if (/^00/); + next unless $reached_table; + print STDERR "Parsing $_" if $debug; + if (/^(\d+):(\d+)\s+(\d+)/) { #hh:mm start time format ? + $$counters_aref[($1*60 + $2)/$hist_interval] += $3 if $hist_opt; + } + elsif (/^(\d+)-(\d+)\s+(\d+)/) { #hh-hh start-end time format ? + $$counters_aref[($1*60)/$hist_interval] += $3 if $hist_opt; + } + else { #Finished the table ? + last; + } + } +} -=cut +####################################################################### +# update_relayed(); +# +# update_relayed($count,$sender,$recipient); +# +# Adds an entry into the %relayed hash. Currently only used when +# merging reports. +####################################################################### sub update_relayed { my($count,$sender,$recipient) = @_; @@ -2509,47 +3699,62 @@ sub update_relayed { } -=head2 add_to_totals(); - - add_to_totals(\%totals,\@keys,$values); - -Given a line of space seperated values, add them into the provided hash using @keys -as the hash keys. - -If the value contains a '%', then the value is set rather than added. Otherwise, we -convert the value to bytes and gigs. The gigs get added to I-gigs. - -=cut - +####################################################################### +# add_to_totals(); +# +# add_to_totals(\%totals,\@keys,$values); +# +# Given a line of space separated values, add them into the provided hash using @keys +# as the hash keys. +# +# If the value contains a '%', then the value is set rather than added. Otherwise, we +# convert the value to bytes and gigs. The gigs get added to I-gigs. 
+####################################################################### sub add_to_totals { my($totals_href,$keys_aref,$values) = @_; my(@values) = split(/\s+/,$values); - my(@keys) = @$keys_aref; #Make a copy as we destroy the one we use. - my($value); - foreach $value (@values) { - my $key = shift(@keys) or next; - if ($value =~ /%/) { - $$totals_href{$key} = $value; + + for(my $i = 0; $i < @values && $i < @$keys_aref; ++$i) { + my $key = $keys_aref->[$i]; + if ($values[$i] =~ /%/) { + $$totals_href{$key} = $values[$i]; } else { $$totals_href{$key} = 0 unless ($$totals_href{$key}); $$totals_href{"$key-gigs"} = 0 unless ($$totals_href{"$key-gigs"}); - un_round($value, \$$totals_href{$key}, \$$totals_href{"$key-gigs"}); - print STDERR "Added $value to $key - $$totals_href{$key} , " . $$totals_href{"$key-gigs"} . "GB.\n" if $debug; + un_round($values[$i], \$$totals_href{$key}, \$$totals_href{"$key-gigs"}); + print STDERR "Added $values[$i] to $key - $$totals_href{$key} , " . $$totals_href{"$key-gigs"} . "GB.\n" if $debug; } } } -=head2 get_report_total(); - $total = get_report_total(\%hash,$key); - -If %hash contains values split into Units and Gigs, we calculate and return - - $hash{$key} + 1024*1024*1024 * $hash{"${key}-gigs"} +####################################################################### +# line_to_hash(); +# +# line_to_hash(\%hash,\@keys,$line); +# +# Given a line of space separated values, set them into the provided hash +# using @keys as the hash keys. +####################################################################### +sub line_to_hash { + my($href,$keys_aref,$values) = @_; + my(@values) = split(/\s+/,$values); + for(my $i = 0; $i < @values && $i < @$keys_aref; ++$i) { + $$href{$keys_aref->[$i]} = $values[$i]; + } +} -=cut +####################################################################### +# get_report_total(); +# +# $total = get_report_total(\%hash,$key); +# +# If %hash contains values split into Units and Gigs, we calculate and return +# +# $hash{$key} + 1024*1024*1024 * $hash{"${key}-gigs"} +####################################################################### sub get_report_total { no integer; my($hash_ref,$key) = @_; @@ -2559,15 +3764,14 @@ sub get_report_total { return $$hash_ref{$key} || 0; } -=head2 html2txt(); - - $text_line = html2txt($html_line); - -Convert a line from html to text. Currently we just convert HTML tags to spaces -and convert >, <, and   tags back. - -=cut - +####################################################################### +# html2txt(); +# +# $text_line = html2txt($html_line); +# +# Convert a line from html to text. Currently we just convert HTML tags to spaces +# and convert >, <, and   tags back. +####################################################################### sub html2txt { ($_) = @_; @@ -2575,7 +3779,7 @@ sub html2txt { # words, so explicitly specify the HTML tags we will remove # (the ones used by this program). If someone is careless enough to have their # Userid the same as an HTML tag, there's not much we can do about it. - s/<\/?(html|head|title|body|h\d|ul|li|a\s+|table|tr|td|th|pre|hr|p|br)\b.*?>/ /og; + s/<\/?(html|head|title|body|h\d|ul|li|a\s+|table|tr|td|th|pre|hr|p|br)\b.*?>/ /g; s/\<\;/\/og; #Convert '>' to '>'. @@ -2583,24 +3787,23 @@ sub html2txt { return($_); } -=head2 get_next_arg(); - - $arg = get_next_arg(); - -Because eximstats arguments are often passed as variables, -we can't rely on shell parsing to deal with quotes. This -subroutine returns $ARGV[1] and does a shift. 
If $ARGV[1] -starts with a quote (' or "), and doesn't end in one, then -we append the next argument to it and shift again. We repeat -until we've got all of the argument. - -This isn't perfect as all white space gets reduced to one space, -but it's as good as we can get! If it's esential that spacing -be preserved precisely, then you get that by not using shell -variables. - -=cut - +####################################################################### +# get_next_arg(); +# +# $arg = get_next_arg(); +# +# Because eximstats arguments are often passed as variables, +# we can't rely on shell parsing to deal with quotes. This +# subroutine returns $ARGV[1] and does a shift. If $ARGV[1] +# starts with a quote (' or "), and doesn't end in one, then +# we append the next argument to it and shift again. We repeat +# until we've got all of the argument. +# +# This isn't perfect as all white space gets reduced to one space, +# but it's as good as we can get! If it's essential that spacing +# be preserved precisely, then you get that by not using shell +# variables. +####################################################################### sub get_next_arg { my $arg = ''; my $matched_pattern = 0; @@ -2620,6 +3823,55 @@ sub get_next_arg { return $arg; } +####################################################################### +# set_worksheet_line($ws_global, $startrow, $startcol, \@content, $format); +# +# set values to a sequence of cells in a row. +# +####################################################################### +sub set_worksheet_line { + my ($worksheet, $row, $col, $content, $format) = @_; + + foreach my $token (@$content) + { + $worksheet->write($row, $col++, $token, $format ); + } + +} + +####################################################################### +# @rcpt_times = parse_time_list($string); +# +# Parse a comma separated list of time values in seconds given by +# the user and fill an array. +# +# Return a default list if $string is undefined. +# Return () if $string eq '0'. +####################################################################### +sub parse_time_list { + my($string) = @_; + if (! defined $string) { + return(60, 5*60, 15*60, 30*60, 60*60, 3*60*60, 6*60*60, 12*60*60, 24*60*60); + } + my(@times) = split(/,/, $string); + foreach my $q (@times) { $q = eval($q) + 0 } + @times = sort { $a <=> $b } @times; + @times = () if ($#times == 0 && $times[0] == 0); + return(@times); +} + + +####################################################################### +# initialise_rcpt_times($protocol); +# Initialise an array of rcpt_times to 0 for the specified protocol. 
+####################################################################### +sub initialise_rcpt_times { + my($protocol) = @_; + for (my $i = 0; $i <= $#rcpt_times; ++$i) { + $rcpt_times_bin{$protocol}[$i] = 0; + } + $rcpt_times_overflow{$protocol} = 0; +} ################################################## @@ -2635,6 +3887,7 @@ $show_transport = 1; $topcount = 50; $local_league_table = 1; $include_remote_users = 0; +$include_original_destination = 0; $hist_opt = 1; $volume_rounding = 1; $localtime_offset = calculate_localtime_offset(); # PH/FANF @@ -2644,30 +3897,28 @@ $charts_option_specified = 0; $chartrel = "."; $chartdir = "."; -@queue_times = (60, 5*60, 15*60, 30*60, 60*60, 3*60*60, 6*60*60, - 12*60*60, 24*60*60); +@queue_times = parse_time_list(); +@rcpt_times = (); +@delivery_times = (); $last_offset = ''; $offset_seconds = 0; +$row=1; +$col=0; +$col_hist=0; +$run_hist=0; +my(%output_files); # What output files have been specified? + # Decode options -while (@ARGV > 0 && substr($ARGV[0], 0, 1) eq '-') - { +while (@ARGV > 0 && substr($ARGV[0], 0, 1) eq '-') { if ($ARGV[0] =~ /^\-h(\d+)$/) { $hist_opt = $1 } elsif ($ARGV[0] =~ /^\-ne$/) { $show_errors = 0 } - elsif ($ARGV[0] =~ /^\-nr(.?)(.*)\1$/) - { + elsif ($ARGV[0] =~ /^\-nr(.?)(.*)\1$/) { if ($1 eq "") { $show_relay = 0 } else { $relay_pattern = $2 } - } - elsif ($ARGV[0] =~ /^\-q([,\d\+\-\*\/]+)$/) - { - @queue_times = split(/,/, $1); - my($q); - foreach $q (@queue_times) { $q = eval($q) + 0 } - @queue_times = sort { $a <=> $b } @queue_times; - @queue_times = () if ($#queue_times == 0 && $queue_times[0] == 0); - } + } + elsif ($ARGV[0] =~ /^\-q([,\d\+\-\*\/]+)$/) { @queue_times = parse_time_list($1) } elsif ($ARGV[0] =~ /^-nt$/) { $show_transport = 0 } elsif ($ARGV[0] =~ /^\-nt(.?)(.*)\1$/) { @@ -2675,7 +3926,16 @@ while (@ARGV > 0 && substr($ARGV[0], 0, 1) eq '-') } elsif ($ARGV[0] =~ /^-t(\d+)$/) { $topcount = $1 } elsif ($ARGV[0] =~ /^-tnl$/) { $local_league_table = 0 } - elsif ($ARGV[0] =~ /^-html$/) { $html = 1 } + elsif ($ARGV[0] =~ /^-txt=?(\S*)$/) { $txt_fh = get_filehandle($1,\%output_files) } + elsif ($ARGV[0] =~ /^-html=?(\S*)$/) { $htm_fh = get_filehandle($1,\%output_files) } + elsif ($ARGV[0] =~ /^-xls=?(\S*)$/) { + if ($HAVE_Spreadsheet_WriteExcel) { + $xls_fh = get_filehandle($1,\%output_files); + } + else { + warn "WARNING: CPAN Module Spreadsheet::WriteExcel not installed. Obtain from www.cpan.org\n"; + } + } elsif ($ARGV[0] =~ /^-merge$/) { $merge_reports = 1 } elsif ($ARGV[0] =~ /^-charts$/) { $charts = 1; @@ -2684,13 +3944,18 @@ while (@ARGV > 0 && substr($ARGV[0], 0, 1) eq '-') } elsif ($ARGV[0] =~ /^-chartdir$/) { $chartdir = $ARGV[1]; shift; $charts_option_specified = 1; } elsif ($ARGV[0] =~ /^-chartrel$/) { $chartrel = $ARGV[1]; shift; $charts_option_specified = 1; } + elsif ($ARGV[0] =~ /^-include_original_destination$/) { $include_original_destination = 1 } elsif ($ARGV[0] =~ /^-cache$/) { } #Not currently used. 
elsif ($ARGV[0] =~ /^-byhost$/) { $do_sender{Host} = 1 } elsif ($ARGV[0] =~ /^-bydomain$/) { $do_sender{Domain} = 1 } elsif ($ARGV[0] =~ /^-byemail$/) { $do_sender{Email} = 1 } elsif ($ARGV[0] =~ /^-byemaildomain$/) { $do_sender{Edomain} = 1 } elsif ($ARGV[0] =~ /^-byedomain$/) { $do_sender{Edomain} = 1 } + elsif ($ARGV[0] =~ /^-bylocaldomain$/) { $do_local_domain = 1 } + elsif ($ARGV[0] =~ /^-emptyok$/) { $emptyOK = 1 } elsif ($ARGV[0] =~ /^-nvr$/) { $volume_rounding = 0 } + elsif ($ARGV[0] =~ /^-show_rt([,\d\+\-\*\/]+)?$/) { @rcpt_times = parse_time_list($1) } + elsif ($ARGV[0] =~ /^-show_dt([,\d\+\-\*\/]+)?$/) { @delivery_times = parse_time_list($1) } elsif ($ARGV[0] =~ /^-d$/) { $debug = 1 } elsif ($ARGV[0] =~ /^--?h(elp)?$/){ help() } elsif ($ARGV[0] =~ /^-t_remote_users$/) { $include_remote_users = 1 } @@ -2712,32 +3977,109 @@ while (@ARGV > 0 && substr($ARGV[0], 0, 1) eq '-') shift; } + # keep old default behaviour + if (! ($xls_fh or $htm_fh or $txt_fh)) { + $txt_fh = \*STDOUT; + } + # Check that all the charts options are specified. warn "-charts option not specified. Use -help for help.\n" if ($charts_option_specified && ! $charts); # Default to display tables by sending Host. $do_sender{Host} = 1 unless ($do_sender{Domain} || $do_sender{Email} || $do_sender{Edomain}); + # prepare xls Excel Workbook + if (defined $xls_fh) { + + # Create a new Excel workbook + $workbook = Spreadsheet::WriteExcel->new($xls_fh); + + # Add worksheets + $ws_global = $workbook->addworksheet('Exim Statistik'); + # show $ws_global as initial sheet + $ws_global->set_first_sheet(); + $ws_global->activate(); + + if ($show_relay) { + $ws_relayed = $workbook->addworksheet('Relayed Messages'); + $ws_relayed->set_column(1, 2, 80); + } + if ($show_errors) { + $ws_errors = $workbook->addworksheet('Errors'); + } + + + # set column widths + $ws_global->set_column(0, 2, 20); # Columns B-D width set to 30 + $ws_global->set_column(3, 3, 15); # Columns B-D width set to 30 + $ws_global->set_column(4, 4, 25); # Columns B-D width set to 30 + + # Define Formats + $f_default = $workbook->add_format(); + + $f_header1 = $workbook->add_format(); + $f_header1->set_bold(); + #$f_header1->set_color('red'); + $f_header1->set_size('15'); + $f_header1->set_valign(); + # $f_header1->set_align('center'); + # $ws_global->write($row++, 2, "Testing Headers 1", $f_header1); + + $f_header2 = $workbook->add_format(); + $f_header2->set_bold(); + $f_header2->set_size('12'); + $f_header2->set_valign(); + # $ws_global->write($row++, 2, "Testing Headers 2", $f_header2); + + # Create another header2 for use in merged cells. + $f_header2_m = $workbook->add_format(); + $f_header2_m->set_bold(); + $f_header2_m->set_size('8'); + $f_header2_m->set_valign(); + $f_header2_m->set_align('center'); + + $f_percent = $workbook->add_format(); + $f_percent->set_num_format('0.0%'); + $f_headertab = $workbook->add_format(); + $f_headertab->set_bold(); + $f_headertab->set_valign(); + # $ws_global->write($row++, 2, "Testing Headers tab", $f_headertab); + + } + + +# Initialise the queue/delivery/rcpt time counters. 
for (my $i = 0; $i <= $#queue_times; $i++) { - $queue_bin[$i] = 0; - $remote_queue_bin[$i] = 0; + $qt_all_bin[$i] = 0; + $qt_remote_bin[$i] = 0; } +for (my $i = 0; $i <= $#delivery_times; $i++) { + $dt_all_bin[$i] = 0; + $dt_remote_bin[$i] = 0; +} +initialise_rcpt_times('all'); -# Compute the number of slots for the histogram +# Compute the number of slots for the histogram if ($hist_opt > 0) { if ($hist_opt > 60 || 60 % $hist_opt != 0) { - print "Eximstats: -h must specify a factor of 60\n"; + print STDERR "Eximstats: -h must specify a factor of 60\n"; exit 1; } - $hist_interval = 60/$hist_opt; #Interval in minutes. - $hist_number = (24*60)/$hist_interval; #Number of intervals per day. + $hist_interval = 60/$hist_opt; #Interval in minutes. + $hist_number = (24*60)/$hist_interval; #Number of intervals per day. @received_interval_count = (0) x $hist_number; @delivered_interval_count = (0) x $hist_number; + my $user_pattern_index = 0; + for (my $user_pattern_index = 0; $user_pattern_index <= $#user_patterns; ++$user_pattern_index) { + @{$user_pattern_interval_count[$user_pattern_index]} = (0) x $hist_number; } + @dt_all_bin = (0) x $hist_number; + @dt_remote_bin = (0) x $hist_number; +} #$queue_unknown = 0; @@ -2747,15 +4089,20 @@ $total_received_count = 0; $total_delivered_data = 0; $total_delivered_data_gigs = 0; -$total_delivered_count = 0; +$total_delivered_messages = 0; +$total_delivered_addresses = 0; -$queue_more_than = 0; +$qt_all_overflow = 0; +$qt_remote_overflow = 0; +$dt_all_overflow = 0; +$dt_remote_overflow = 0; $delayed_count = 0; $relayed_unshown = 0; +$message_errors = 0; $begin = "9999-99-99 99:99:99"; $end = "0000-00-00 00:00:00"; my($section,$type); -foreach $section ('Received','Delivered') { +foreach $section ('Received','Delivered','Temp Rejects', 'Rejects','Ham','Spam') { foreach $type ('Volume','Messages','Delayed','Failed','Hosts','Domains','Emails','Edomains') { $report_totals{$section}{$type} = 0; } @@ -2771,20 +4118,20 @@ if (@ARGV) { foreach my $file (@ARGV) { if ($file =~ /\.gz/) { unless (open(FILE,"gunzip -c $file |")) { - print STDERR "Failed to gunzip -c $file: $!"; - next; + print STDERR "Failed to gunzip -c $file: $!"; + next; } } elsif ($file =~ /\.Z/) { unless (open(FILE,"uncompress -c $file |")) { - print STDERR "Failed to uncompress -c $file: $!"; - next; + print STDERR "Failed to uncompress -c $file: $!"; + next; } } else { unless (open(FILE,$file)) { - print STDERR "Failed to read $file: $!"; - next; + print STDERR "Failed to read $file: $!"; + next; } } #Now parse the filehandle, updating the global variables. @@ -2798,8 +4145,8 @@ else { } -if ($begin eq "9999-99-99 99:99:99") { - print "**** No valid log lines read\n"; +if ($begin eq "9999-99-99 99:99:99" && ! $emptyOK) { + print STDERR "**** No valid log lines read\n"; exit 1; } @@ -2810,20 +4157,36 @@ print_grandtotals(); # Print counts of user specified patterns if required. print_user_patterns() if @user_patterns; +# Print rejection reasons. +# print_rejects(); + # Print totals by transport if required. print_transport() if $show_transport; # Print the deliveries per interval as a histogram, unless configured not to. # First find the maximum in one interval and scale accordingly. 
if ($hist_opt > 0) { - print_histogram("Messages received", @received_interval_count); - print_histogram("Deliveries", @delivered_interval_count); + print_histogram("Messages received", 'message', @received_interval_count); + print_histogram("Deliveries", 'delivery', @delivered_interval_count); } # Print times on queue if required. if ($#queue_times >= 0) { - print_queue_times("all messages", \@queue_bin,$queue_more_than); - print_queue_times("messages with at least one remote delivery",\@remote_queue_bin,$queue_more_than); + print_duration_table("Time spent on the queue", "all messages", \@queue_times, \@qt_all_bin,$qt_all_overflow); + print_duration_table("Time spent on the queue", "messages with at least one remote delivery", \@queue_times, \@qt_remote_bin,$qt_remote_overflow); +} + +# Print delivery times if required. +if ($#delivery_times >= 0) { + print_duration_table("Delivery times", "all messages", \@delivery_times, \@dt_all_bin,$dt_all_overflow); + print_duration_table("Delivery times", "messages with at least one remote delivery", \@delivery_times, \@dt_remote_bin,$dt_remote_overflow); +} + +# Print rcpt times if required. +if ($#rcpt_times >= 0) { + foreach my $protocol ('all', grep(!/^all$/, sort keys %rcpt_times_bin)) { + print_duration_table("Receipt times", "$protocol messages", \@rcpt_times, $rcpt_times_bin{$protocol}, $rcpt_times_overflow{$protocol}); + } } # Print relay information if required. @@ -2831,26 +4194,53 @@ print_relay() if $show_relay; # Print the league tables, if topcount isn't zero. if ($topcount > 0) { + my($ws_rej, $ws_top50, $ws_rej_row, $ws_top50_row, $ws_temp_rej, $ws_temp_rej_row); + $ws_rej_row = $ws_temp_rej_row = $ws_top50_row = 0; + if ($xls_fh) { + $ws_top50 = $workbook->addworksheet('Deliveries'); + $ws_rej = $workbook->addworksheet('Rejections') if (%rejected_count_by_reason || %rejected_count_by_ip || %spam_count_by_ip); + $ws_temp_rej = $workbook->addworksheet('Temporary Rejections') if (%temporarily_rejected_count_by_reason || %temporarily_rejected_count_by_ip); + } + + print_league_table("mail rejection reason", \%rejected_count_by_reason, undef, undef, undef, $ws_rej, \$ws_rej_row) if %rejected_count_by_reason; + print_league_table("mail temporary rejection reason", \%temporarily_rejected_count_by_reason, undef, undef, undef, $ws_temp_rej, \$ws_temp_rej_row) if %temporarily_rejected_count_by_reason; + foreach ('Host','Domain','Email','Edomain') { next unless $do_sender{$_}; - print_league_table("sending \l$_", $received_count{$_}, $received_data{$_},$received_data_gigs{$_}); + print_league_table("sending \l$_", $received_count{$_}, undef, $received_data{$_},$received_data_gigs{$_}, $ws_top50, \$ws_top50_row); } - print_league_table("local sender", \%received_count_user, - \%received_data_user,\%received_data_gigs_user) if ($local_league_table || $include_remote_users); + print_league_table("local sender", \%received_count_user, undef, + \%received_data_user,\%received_data_gigs_user, $ws_top50, \$ws_top50_row) if (($local_league_table || $include_remote_users) && %received_count_user); foreach ('Host','Domain','Email','Edomain') { next unless $do_sender{$_}; - print_league_table("\l$_ destination", $delivered_count{$_}, $delivered_data{$_},$delivered_data_gigs{$_}); + print_league_table("\l$_ destination", $delivered_messages{$_}, $delivered_addresses{$_}, $delivered_data{$_},$delivered_data_gigs{$_}, $ws_top50, \$ws_top50_row); } - print_league_table("local destination", \%delivered_count_user, - 
\%delivered_data_user,\%delivered_data_gigs_user) if ($local_league_table || $include_remote_users); + print_league_table("local destination", \%delivered_messages_user, \%delivered_addresses_user, \%delivered_data_user,\%delivered_data_gigs_user, $ws_top50, \$ws_top50_row) if (($local_league_table || $include_remote_users) && %delivered_messages_user); + print_league_table("local domain destination", \%delivered_messages_local_domain, \%delivered_addresses_local_domain, \%delivered_data_local_domain,\%delivered_data_gigs_local_domain, $ws_top50, \$ws_top50_row) if (($local_league_table || $include_remote_users) && %delivered_messages_local_domain); + + print_league_table("rejected ip", \%rejected_count_by_ip, undef, undef, undef, $ws_rej, \$ws_rej_row) if %rejected_count_by_ip; + print_league_table("temporarily rejected ip", \%temporarily_rejected_count_by_ip, undef, undef, undef, $ws_rej, \$ws_rej_row) if %temporarily_rejected_count_by_ip; + print_league_table("non-rejected spamming ip", \%spam_count_by_ip, undef, undef, undef, $ws_rej, \$ws_rej_row) if %spam_count_by_ip; + } # Print the error statistics if required. print_errors() if $show_errors; -if ($html) { - print "\n\n" +print $htm_fh "\n\n" if $htm_fh; + + +$txt_fh->close if $txt_fh && ref $txt_fh; +$htm_fh->close if $htm_fh; + +if ($xls_fh) { + # close Excel Workbook + $ws_global->set_first_sheet(); + # FIXME: whyever - activate does not work :-/ + $ws_global->activate(); + $workbook->close(); } + # End of eximstats
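
The patch above introduces parse_time_list() for the -q, -show_rt and -show_dt options. As an illustrative aside (a standalone sketch, not part of the patch itself), the snippet below mirrors that parsing so the accepted syntax can be tried in isolation: values are comma separated, each one is eval'ed so arithmetic such as 5*60 works, the list is sorted numerically, and a lone 0 disables the corresponding table.

#!/usr/bin/perl
use strict;
use warnings;

# Standalone sketch of the -q/-show_rt/-show_dt time-list syntax
# (illustrative only; the real script uses its own parse_time_list()):
# comma-separated values, eval'ed so '5*60' works, sorted numerically,
# with a lone '0' meaning "no table".
sub demo_parse_time_list {
  my ($string) = @_;
  return (60, 5*60, 15*60, 30*60, 60*60, 3*60*60, 6*60*60, 12*60*60, 24*60*60)
    unless defined $string;
  my @times = split /,/, $string;
  $_ = eval($_) + 0 for @times;
  @times = sort { $a <=> $b } @times;
  @times = () if (@times == 1 && $times[0] == 0);
  return @times;
}

print join(',', demo_parse_time_list()), "\n";                 # default queue-time bins
print join(',', demo_parse_time_list('10,2*60,60*60')), "\n";  # 10,120,3600
print join(',', demo_parse_time_list('0')), "\n";              # empty list, table disabled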