From: Darold Gilles
Date: Tue, 19 Jan 2016 20:13:21 +0000 (+0100)
Subject: Add --rebuild option to be able to rebuild all html reports in an incremental output...
X-Git-Tag: v8.0~11
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=3aa8379e07e6209c6b301951f9d2a883cd48f7f6;p=pgbadger

Add --rebuild option to be able to rebuild all HTML reports in an
incremental output directory where there are binary data files.
---

diff --git a/README b/README
index 8aa1d4d..6251cda 100644
--- a/README
+++ b/README
@@ -137,6 +137,8 @@ SYNOPSIS
         --pid-dir dirpath : set the path of the directory where the pid file
                             will be written to be able to run two pgbadger at
                             the same time.
+        --rebuild         : used to rebuild all HTML reports in incremental
+                            output directories where there are binary data files.

     pgBadger is able to parse a remote log file using a passwordless ssh
     connection. Use the -r or --remote-host to set the host ip address or
@@ -163,18 +165,18 @@ SYNOPSIS
                         /var/log/postgresql.log
         cat /var/log/postgres.log | pgbadger -

     # Log prefix with stderr log output
-    perl pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
+    pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
                     /pglog/postgresql-2012-08-21*
-    perl pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
+    pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
     # Log line prefix with syslog log output
-    perl pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
+    pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
                     /pglog/postgresql-2012-08-21*
     # Use my 8 CPUs to parse my 10GB file faster, much faster
-    perl pgbadger -j 8 /pglog/postgresql-9.1-main.log
+    pgbadger -j 8 /pglog/postgresql-9.1-main.log

     Generate Tsung sessions XML file with select queries only:

-    perl pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log
+    pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log

     Reporting errors every week by cron job:

@@ -212,15 +214,23 @@ SYNOPSIS
     You can also parse journalctl output just as if it was a log file:

-        pgbadger --journalctl 'journalctl -u postgresql-9.5' (was: perl pgbadger ...)
-        perl pgbadger --journalctl 'journalctl -u postgresql-9.5'
+        pgbadger --journalctl 'journalctl -u postgresql-9.5'

     or worse, call it from a remote host:

-        perl pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'
+        pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'

     you don't need to specify any log file at command line, but if you have
     other PostgreSQL log files to parse, you can add them as usual.

+    To rebuild all incremental HTML reports afterwards, proceed as follows:
+
+        rm /path/to/reports/*.js
+        rm /path/to/reports/*.css
+        pgbadger -X -I -O /path/to/reports/ --rebuild
+
+    it will also update all resources files (JS and CSS).
+
 DESCRIPTION
     pgBadger is a PostgreSQL log analyzer built for speed with fully
     detailed reports from your PostgreSQL log file. It's a single and
     small Perl script
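The README's advice to delete the .js and .css files before a --rebuild run exists because pgbadger only writes its bundled resource files into the output directory when they are missing (see the context line "# Write resources files from __DATA__ section if they have not been already copied" later in this patch). A hedged sketch of that install-if-missing pattern follows; the real script extracts the content from its __DATA__ section, so the fetch_resource() helper and the file names below are illustrative only:

    use strict;
    use warnings;

    # Illustrative stand-in: the real script pulls this content out of its
    # __DATA__ section rather than from a helper like this.
    sub fetch_resource { my ($name) = @_; return "/* bundled $name content */\n" }

    my $outdir = '/path/to/reports';                  # placeholder path from the README
    foreach my $res ('pgbadger.js', 'pgbadger.css') { # illustrative file names
        my $dest = "$outdir/$res";
        next if -e $dest;               # install-if-missing: an existing copy is kept,
                                        # so deleting it forces a refresh on the next run
        open(my $out, '>', $dest) or die "FATAL: can't write to $dest, $!\n";
        print $out fetch_resource($res);
        close($out);
    }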
diff --git a/doc/pgBadger.pod b/doc/pgBadger.pod
index 2541b57..77000b6 100644
--- a/doc/pgBadger.pod
+++ b/doc/pgBadger.pod
@@ -139,6 +139,8 @@ Options:
     --pid-dir dirpath : set the path of the directory where the pid file
                         will be written to be able to run two pgbadger at
                         the same time.
+    --rebuild         : used to rebuild all HTML reports in incremental
+                        output directories where there are binary data files.

 pgBadger is able to parse a remote log file using a passwordless ssh
 connection. Use the -r or --remote-host to set the host ip address or
 hostname. There's also
@@ -164,19 +166,19 @@ Examples:
                     /var/log/postgresql.log
     cat /var/log/postgres.log | pgbadger -

     # Log prefix with stderr log output
-    perl pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
+    pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
                     /pglog/postgresql-2012-08-21*
-    perl pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
+    pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
     # Log line prefix with syslog log output
-    perl pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
+    pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
                     /pglog/postgresql-2012-08-21*
     # Use my 8 CPUs to parse my 10GB file faster, much faster
-    perl pgbadger -j 8 /pglog/postgresql-9.1-main.log
+    pgbadger -j 8 /pglog/postgresql-9.1-main.log

 Generate Tsung sessions XML file with select queries only:

-    perl pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log
+    pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log

 Reporting errors every week by cron job:

@@ -213,15 +215,23 @@ solve this problem in a simpler way.
 You can also parse journalctl output just as if it was a log file:

-    perl pgbadger --journalctl 'journalctl -u postgresql-9.5'
+    pgbadger --journalctl 'journalctl -u postgresql-9.5'

 or worse, call it from a remote host:

-    perl pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'
+    pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'

 you don't need to specify any log file at command line, but if you have
 other PostgreSQL log files to parse, you can add them as usual.

+To rebuild all incremental HTML reports afterwards, proceed as follows:
+
+    rm /path/to/reports/*.js
+    rm /path/to/reports/*.css
+    pgbadger -X -I -O /path/to/reports/ --rebuild
+
+it will also update all resources files (JS and CSS).
+
 =head1 DESCRIPTION

 pgBadger is a PostgreSQL log analyzer built for speed with fully
 detailed reports from your PostgreSQL log file.
diff --git a/pgbadger b/pgbadger
index 703804a..6e69b1e 100755
--- a/pgbadger
+++ b/pgbadger
@@ -272,6 +272,8 @@ my $log_duration = 0;
 my $logfile_list = '';
 my $enable_checksum = 0;
 my $timezone = 0;
+my $rebuild = 0;
+
 my $NUMPROGRESS = 10000;
 my @DIMENSIONS = (800, 300);
@@ -427,6 +429,7 @@ my $result = GetOptions(
     'enable-checksum!' => \$enable_checksum,
     'journalctl=s'     => \$journalctl_cmd,
     'pid-dir=s'        => \$PID_DIR,
+    'rebuild!'         => \$rebuild,
 );
 die "FATAL: use pgbadger --help\n" if (not $result);
@@ -564,7 +567,7 @@ if ($logfile_list) {
 }

 # Logfile is a mandatory parameter when journalctl command is not set.
-if ( ($#log_files < 0) && !$journalctl_cmd) {
+if ( !$rebuild && ($#log_files < 0) && !$journalctl_cmd) {
     print STDERR "FATAL: you must give a log file as command line parameter.\n\n";
     &usage();
 }
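Two small behavioural points in the hunks above are easy to miss: the trailing "!" in 'rebuild!' makes Getopt::Long accept both --rebuild and --no-rebuild, and the changed guard lets a rebuild run start without any log file on the command line. A minimal, standalone illustration of these Getopt::Long semantics (a sketch, not pgbadger code; the journalctl escape hatch is omitted):

    use strict;
    use warnings;
    use Getopt::Long;

    # 'rebuild!' declares a negatable boolean: --rebuild sets 1,
    # --no-rebuild sets 0, and absence keeps the default.
    my $rebuild = 0;
    GetOptions('rebuild!' => \$rebuild) or die "FATAL: use pgbadger --help\n";
    my @log_files = @ARGV;

    # Mirrors the patched guard: a log file is only mandatory when
    # not rebuilding.
    die "FATAL: you must give a log file as command line parameter.\n"
        if (!$rebuild && $#log_files < 0);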
@@ -607,18 +610,25 @@ my $orphan_stderr_line = '';

 # Set default format, with multiple files the format will be autodetected each time.
 # This default format will be used when the autodetection fails.
 my $frmt = '';
-if (!$remote_host) {
-    if ($journalctl_cmd) {
-        $frmt = 'syslog2';
-    } else {
-        $frmt = &autodetect_format($log_files[0]);
+if (!$rebuild) {
+    if (!$remote_host) {
+        if ($journalctl_cmd) {
+            $frmt = 'syslog2';
+        } else {
+            $frmt = &autodetect_format($log_files[0]);
+        }
+    } elsif (!$format) {
+        if ($journalctl_cmd) {
+            $frmt = 'syslog2';
+        } else {
+            localdie("FATAL: you must give a log file format (-f or --format) when using remote connection.\n\n");
+        }
     }
-} elsif (!$format) {
-    if ($journalctl_cmd) {
-        $frmt = 'syslog2';
-    } else {
-        localdie("FATAL: you must give a log file format (-f or --format) when using remote connection.\n\n");
+} else {
+    if (!$incremental) {
+        print STDERR "WARNING: --rebuild requires incremental mode, activating it.\n";
     }
+    $incremental = 1;
 }
 $format ||= $frmt;
@@ -1049,6 +1059,9 @@ my @top_tempfile_info = ();
 my @top_cancelled_info = ();
 my %drawn_graphs = ();

+# Global output filehandle
+my $fh = undef;
+
 my $t0 = Benchmark->new;

 # Write resources files from __DATA__ section if they have not been already copied
@@ -1079,6 +1092,52 @@ if ($incremental) {
     # Set default output format
     $extension = 'binary';

+    if ($rebuild) {
+
+        # Look for directories where reports must be generated again
+        my @build_directories = ();
+
+        # Find directories that should be rebuilt
+        unless(opendir(DIR, "$outdir")) {
+            localdie("Error: can't opendir $outdir: $!");
+        }
+        my @dyears = grep { $_ =~ /^\d+$/ } readdir(DIR);
+        closedir DIR;
+        foreach my $y (sort { $a <=> $b } @dyears) {
+            unless(opendir(DIR, "$outdir/$y")) {
+                localdie("Error: can't opendir $outdir/$y: $!");
+            }
+            my @dmonths = grep { $_ =~ /^\d+$/ } readdir(DIR);
+            closedir DIR;
+            foreach my $m (sort { $a <=> $b } @dmonths) {
+                unless(opendir(DIR, "$outdir/$y/$m")) {
+                    localdie("Error: can't opendir $outdir/$y/$m: $!");
+                }
+                my @ddays = grep { $_ =~ /^\d+$/ } readdir(DIR);
+                closedir DIR;
+                foreach my $d (sort { $a <=> $b } @ddays) {
+                    unless(opendir(DIR, "$outdir/$y/$m/$d")) {
+                        localdie("Error: can't opendir $outdir/$y/$m/$d: $!");
+                    }
+                    my @binfiles = grep { $_ =~ /\.bin$/ } readdir(DIR);
+                    closedir DIR;
+                    push(@build_directories, "$y-$m-$d") if ($#binfiles >= 0);
+                }
+            }
+        }
+
+        &build_incremental_reports(@build_directories);
+
+        my $t2 = Benchmark->new;
+        my $td = timediff($t2, $t0);
+        &logmsg('DEBUG', "rebuilding reports took: " . timestr($td));
+
+        # Remove pidfile
+        unlink("$PID_DIR/pgbadger.pid");
+
+        exit 0;
+    }
+
 } else {

     # Extra files for resources are not allowed without incremental mode
@@ -1120,6 +1179,7 @@ if (!$noclean && $saved_last_line{datetime} && $outdir) {
     my $wn = &get_week_number($last_year, $last_month, $last_day);
     # Get the days of the current week where binary files must be preserved
     my @wdays = &get_wdays_per_month($wn - 1, "$last_year-$last_month");
+
     # Find obsolete dir days that should be cleaned
     unless(opendir(DIR, "$outdir")) {
         localdie("Error: can't opendir $outdir: $!");
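The nested opendir/readdir walk above collects one "Y-M-D" entry for every day directory under $outdir that still holds binary data files. As a cross-check, here is a hedged equivalent using core File::Find — a sketch, not the committed code; it assumes the same year/month/day layout, and its final lexical sort differs slightly from the patch's numeric per-level ordering:

    use strict;
    use warnings;
    use File::Find;

    # Collect one "Y-M-D" entry per day directory under $outdir that
    # contains at least one .bin data file.
    my $outdir = '/path/to/reports';   # illustrative; the patch uses $outdir
    my %seen;
    find(sub {
        return unless /\.bin$/;
        $seen{"$1-$2-$3"} = 1 if $File::Find::dir =~ m{/(\d+)/(\d+)/(\d+)$};
    }, $outdir);
    my @build_directories = sort keys %seen;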
@@ -1443,8 +1503,6 @@ my $t1 = Benchmark->new;
 my $td = timediff($t1, $t0);
 &logmsg('DEBUG', "the log statistics gathering took:" .
        timestr($td));

-# Global output filehandle
-my $fh = undef;

 if (!$incremental && ($#given_log_files >= 0) ) {
@@ -1490,8 +1548,7 @@ if (!$incremental && ($#given_log_files >= 0) ) {

 } elsif (!$incremental || !$noreport) {

-    # Build a report per day
-    my %weeks_directories = ();
+    # Look for directories where reports must be generated
     my @build_directories = ();
     if (open(IN, "$last_parsed.tmp")) {
         while (my $l = <IN>) {
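This hunk and the next are the two halves of an extract-function refactor: the roughly 200 lines deleted below reappear almost verbatim as build_incremental_reports(), which is then called from both the normal incremental path and the new --rebuild path. A runnable sketch of the resulting control flow — the two finder helpers are illustrative stand-ins for code that is inlined in the actual patch:

    use strict;
    use warnings;

    sub find_days_with_bin_files { return ('2016-1-19') }   # illustrative stub
    sub read_touched_days        { return ('2016-1-19') }   # illustrative stub
    sub build_incremental_reports {
        my @days = @_;
        print "would rebuild daily/weekly reports and index for: @days\n";
    }

    my ($rebuild, $outdir, $last_parsed) = (1, '/path/to/reports', 'last_parsed');
    if ($rebuild) {
        # --rebuild: skip log parsing, regenerate from the stored .bin files
        build_incremental_reports(find_days_with_bin_files($outdir));
        exit 0;
    }
    # normal incremental run: parse logs first, then rebuild only the
    # days recorded as touched in $last_parsed.tmp
    build_incremental_reports(read_touched_days("$last_parsed.tmp"));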
@@ -1505,204 +1562,7 @@
         &logmsg('WARNING', "can't read file $last_parsed.tmp, $!");
         &logmsg('HINT', "maybe there's no new entries in your log since last run.");
     }
-    foreach $incr_date (sort @build_directories) {
-
-        $last_incr_date = $incr_date;
-
-        # Set the path to binary files
-        my $bpath = $incr_date;
-        $bpath =~ s/\-/\//g;
-        $incr_date =~ /^(\d+)-(\d+)\-(\d+)$/;
-
-        # Get the week number following the date
-        my $wn = &get_week_number($1, $2, $3);
-        $weeks_directories{$wn} = "$1-$2" if (!exists $weeks_directories{$wn});
-
-        # First clear previous stored statistics
-        &init_stats_vars();
-
-        # Load all data gathered by all the different processes
-        unless(opendir(DIR, "$outdir/$bpath")) {
-            localdie("Error: can't opendir $outdir/$bpath: $!");
-        }
-        my @mfiles = grep { !/^\./ && ($_ =~ /\.bin$/) } readdir(DIR);
-        closedir DIR;
-        foreach my $f (@mfiles) {
-            my $fht = new IO::File;
-            $fht->open("< $outdir/$bpath/$f") or localdie("FATAL: can't open file $outdir/$bpath/$f, $!\n");
-            &load_stats($fht);
-            $fht->close();
-        }
-
-        &logmsg('LOG', "Ok, generating HTML daily report into $outdir/$bpath/...");
-
-        $fh = new IO::File ">$outdir/$bpath/$outfile";
-        if (not defined $fh) {
-            localdie("FATAL: can't write to $outdir/$bpath/$outfile, $!\n");
-        }
-        # Create instance to prettify SQL query
-        if (!$noprettify) {
-            $sql_prettified = SQL::Beautify->new(keywords => \@beautify_pg_keywords);
-        }
-        &dump_as_html('../../..');
-        $fh->close;
-    }
-
-    # Build a report per week
-    foreach my $wn (sort { $a <=> $b } keys %weeks_directories) {
-        &init_stats_vars();
-
-        # Get all days of the current week
-        my @wdays = &get_wdays_per_month($wn - 1, $weeks_directories{$wn});
-        my $wdir = '';
-
-        # Load data per day
-        foreach $incr_date (@wdays) {
-            my $bpath = $incr_date;
-            $bpath =~ s/\-/\//g;
-            $incr_date =~ /^(\d+)\-(\d+)\-(\d+)$/;
-            $wdir = "$1/week-$wn";
-
-            # Load all data gathered by all the different processes
-            if (-e "$outdir/$bpath") {
-                unless(opendir(DIR, "$outdir/$bpath")) {
-                    localdie("Error: can't opendir $outdir/$bpath: $!");
-                }
-                my @mfiles = grep { !/^\./ && ($_ =~ /\.bin$/) } readdir(DIR);
-                closedir DIR;
-                foreach my $f (@mfiles) {
-                    my $fht = new IO::File;
-                    $fht->open("< $outdir/$bpath/$f") or localdie("FATAL: can't open file $outdir/$bpath/$f, $!\n");
-                    &load_stats($fht);
-                    $fht->close();
-                }
-            }
-        }
-
-        &logmsg('LOG', "Ok, generating HTML weekly report into $outdir/$wdir/...");
-        if (!-d "$outdir/$wdir") {
-            mkdir("$outdir/$wdir");
-        }
-        $fh = new IO::File ">$outdir/$wdir/$outfile";
-        if (not defined $fh) {
-            localdie("FATAL: can't write to $outdir/$wdir/$outfile, $!\n");
-        }
-        # Create instance to prettify SQL query
-        if (!$noprettify) {
-            $sql_prettified = SQL::Beautify->new(keywords => \@beautify_pg_keywords);
-        }
-        &dump_as_html('../..');
-        $fh->close;
-
-    }
-
-    &logmsg('LOG', "Ok, generating global index to access incremental reports...");
-
-    $fh = new IO::File ">$outdir/index.html";
-    if (not defined $fh) {
-        localdie("FATAL: can't write to $outdir/index.html, $!\n");
-    }
-    my $date = localtime(time);
-    my @tmpjscode = @jscode;
-    map { s/EDIT_URI/\./; } @tmpjscode;
-    my $local_title = 'Global Index on incremental reports';
-    if ($report_title) {
-        $local_title = 'Global Index - ' . $report_title;
-    }
-    print $fh qq{ [HTML page header and navigation for the global index: head with title "pgBadger :: $local_title" and resource includes via @tmpjscode, then the opening of the calendar container -- markup stripped in this plain-text rendering] };
-    # get year directories
-    unless(opendir(DIR, "$outdir")) {
-        localdie("Error: can't opendir $outdir: $!");
-    }
-    my @dyears = grep { !/^\./ && /^\d{4}$/ } readdir(DIR);
-    closedir DIR;
-    foreach my $y (sort { $b <=> $a } @dyears) {
-        print $fh qq{ ["Year $y" section header and opening of the year's calendar table -- markup stripped] };
-        # foreach year directory look for week directories
-        unless(opendir(DIR, "$outdir/$y")) {
-            localdie("Error: can't opendir $outdir/$y: $!");
-        }
-        my @ymonths = grep { /^\d{2}$/ } readdir(DIR);
-        closedir DIR;
-        my $i = 1;
-        foreach my $m (sort {$a <=> $b } @ymonths) {
-            print $fh "<td>", &get_calendar($y, $m), "</td>\n";
-            print $fh "</tr><tr>\n" if ( ($i%4) == 0 );
-            $i++;
-        }
-        print $fh qq{ [closing of the year's calendar table -- markup stripped] };
-    }
-    print $fh qq{ [closing of the calendar container and HTML page footer (report generated on $date) -- markup stripped] };
-    $fh->close;
-
+    &build_incremental_reports(@build_directories);
 }

 my $t2 = Benchmark->new;
@@ -1857,6 +1717,8 @@ Options:
     --pid-dir dirpath : set the path of the directory where the pid file
                         will be written to be able to run two pgbadger at
                         the same time.
+    --rebuild         : used to rebuild all HTML reports in incremental
+                        output directories where there are binary data files.

 pgBadger is able to parse a remote log file using a passwordless ssh
 connection. Use the -r or --remote-host to set the host ip address or
 hostname. There's also
@@ -1931,20 +1793,229 @@ solve this problem in a simpler way.

 You can also parse journalctl output just as if it was a log file:

-    perl pgbadger --journalctl 'journalctl -u postgresql-9.5'
+    pgbadger --journalctl 'journalctl -u postgresql-9.5'

 or worse, call it from a remote host:

-    perl pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'
+    pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'

 you don't need to specify any log file at command line, but if you have
 other PostgreSQL log files to parse, you can add them as usual.

+To rebuild all incremental HTML reports afterwards, proceed as follows:
+
+    rm /path/to/reports/*.js
+    rm /path/to/reports/*.css
+    pgbadger -X -I -O /path/to/reports/ --rebuild
+
+it will also update all resources files (JS and CSS).
+
 };
 exit 0;
 }

+sub build_incremental_reports
+{
+    my @build_directories = @_;
+
+    my %weeks_directories = ();
+    foreach $incr_date (sort @build_directories) {
+
+        $last_incr_date = $incr_date;
+
+        # Set the path to binary files
+        my $bpath = $incr_date;
+        $bpath =~ s/\-/\//g;
+        $incr_date =~ /^(\d+)-(\d+)\-(\d+)$/;
+
+        # Get the week number following the date
+        my $wn = &get_week_number($1, $2, $3);
+        $weeks_directories{$wn} = "$1-$2" if ($rebuild || !exists $weeks_directories{$wn});
+
+        # First clear previous stored statistics
+        &init_stats_vars();
+
+        # Load all data gathered by all the different processes
+        unless(opendir(DIR, "$outdir/$bpath")) {
+            localdie("Error: can't opendir $outdir/$bpath: $!");
+        }
+        my @mfiles = grep { !/^\./ && ($_ =~ /\.bin$/) } readdir(DIR);
+        closedir DIR;
+        foreach my $f (@mfiles) {
+            my $fht = new IO::File;
+            $fht->open("< $outdir/$bpath/$f") or localdie("FATAL: can't open file $outdir/$bpath/$f, $!\n");
+            &load_stats($fht);
+            $fht->close();
+        }
+
+        &logmsg('LOG', "Ok, generating HTML daily report into $outdir/$bpath/...");
+
+        $fh = new IO::File ">$outdir/$bpath/$outfile";
+        if (not defined $fh) {
+            localdie("FATAL: can't write to $outdir/$bpath/$outfile, $!\n");
+        }
+        # Create instance to prettify SQL query
+        if (!$noprettify) {
+            $sql_prettified = SQL::Beautify->new(keywords => \@beautify_pg_keywords);
+        }
+        &dump_as_html('../../..');
+        $fh->close;
+    }
+
+    # Build a report per week
+    foreach my $wn (sort { $a <=> $b } keys %weeks_directories) {
+        &init_stats_vars();
+
+        # Get all days of the current week
+        my @wdays = &get_wdays_per_month($wn - 1, $weeks_directories{$wn});
+        my $wdir = '';
+
+        # Load data per day
+        foreach $incr_date (@wdays) {
+            my $bpath = $incr_date;
+            $bpath =~ s/\-/\//g;
+            $incr_date =~ /^(\d+)\-(\d+)\-(\d+)$/;
+            $wdir = "$1/week-$wn";
+
+            # Load all data gathered by all the different processes
+            if (-e "$outdir/$bpath") {
+                unless(opendir(DIR, "$outdir/$bpath")) {
+                    localdie("Error: can't opendir $outdir/$bpath: $!");
+                }
+                my @mfiles = grep { !/^\./ && ($_ =~ /\.bin$/) } readdir(DIR);
+                closedir DIR;
+                foreach my $f (@mfiles) {
+                    my $fht = new IO::File;
+                    $fht->open("< $outdir/$bpath/$f") or localdie("FATAL: can't open file $outdir/$bpath/$f, $!\n");
localdie("FATAL: can't open file $outdir/$bpath/$f, $!\n"); + &load_stats($fht); + $fht->close(); + } + } + } + + &logmsg('LOG', "Ok, generating HTML weekly report into $outdir/$wdir/..."); + if (!-d "$outdir/$wdir") { + mkdir("$outdir/$wdir"); + } + $fh = new IO::File ">$outdir/$wdir/$outfile"; + if (not defined $fh) { + localdie("FATAL: can't write to $outdir/$wdir/$outfile, $!\n"); + } + # Create instance to prettify SQL query + if (!$noprettify) { + $sql_prettified = SQL::Beautify->new(keywords => \@beautify_pg_keywords); + } + &dump_as_html('../..'); + $fh->close; + + } + + &logmsg('LOG', "Ok, generating global index to access incremental reports..."); + + $fh = new IO::File ">$outdir/index.html"; + if (not defined $fh) { + localdie("FATAL: can't write to $outdir/index.html, $!\n"); + } + my $date = localtime(time); + my @tmpjscode = @jscode; + map { s/EDIT_URI/\./; } @tmpjscode; + my $local_title = 'Global Index on incremental reports'; + if ($report_title) { + $local_title = 'Global Index - ' . $report_title; + } + print $fh qq{ + + +pgBadger :: $local_title + + + + + + + +@tmpjscode + + + + + + +


+
+ + + + +}; + # get year directories + unless(opendir(DIR, "$outdir")) { + localdie("Error: can't opendir $outdir: $!"); + } + my @dyears = grep { !/^\./ && /^\d{4}$/ } readdir(DIR); + closedir DIR; + foreach my $y (sort { $b <=> $a } @dyears) { + print $fh qq{ +

Year $y

+ +}; + # foreach year directory look for week directories + unless(opendir(DIR, "$outdir/$y")) { + localdie("Error: can't opendir $outdir/$y: $!"); + } + + my @ymonths = grep { /^\d{2}$/ } readdir(DIR); + closedir DIR; + my $i = 1; + foreach my $m (sort {$a <=> $b } @ymonths) { + print $fh "\n"; + print $fh "\n\n" if ( ($i%4) == 0 ); + $i++; + } + print $fh qq{ +
", &get_calendar($y, $m), "
+}; + } + print $fh qq{ +
+ + + +
+ +
+ + + +}; + $fh->close; +} + sub cleanup_directory { my ($dir, $remove_dir) = @_; @@ -4235,6 +4306,8 @@ sub print_global_information my $curdate = localtime(time); my $fmt_nlines = &comma_numbers($nlines); + my $t3 = Benchmark->new; + my $td = timediff($t3, $t0); my $total_time = timestr($td); $total_time =~ s/^([\.0-9]+) wallclock.*/$1/; $total_time = &convert_time($total_time * 1000);
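The final hunk recomputes the elapsed-time figure shown in the report header from a fresh Benchmark pair instead of reusing a stale $td. For reference, a minimal self-contained sketch of this core-Perl timing idiom, mirroring the patch's wallclock-extraction regex (the sleep is just a stand-in for real work):

    use strict;
    use warnings;
    use Benchmark qw(timediff timestr);

    my $t0 = Benchmark->new;         # taken once at program start
    sleep 1;                         # stand-in for parsing/report work
    my $t3 = Benchmark->new;         # taken when the report header is printed
    my $td = timediff($t3, $t0);     # Benchmark object holding the delta
    (my $secs = timestr($td)) =~ s/^\s*([.0-9]+) wallclock.*/$1/;
    print "total time: $secs seconds\n";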