From d7de9069dc2b39d3f5e5ec4ea28b54d2564c1ed8 Mon Sep 17 00:00:00 2001
From: Darold Gilles
Date: Mon, 18 Jan 2016 21:45:17 +0100
Subject: [PATCH] Fix another major bug introduced with the journalctl code
 that prevents the use of multiprocessing.

---
 pgbadger | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pgbadger b/pgbadger
index 72f16e8..703804a 100755
--- a/pgbadger
+++ b/pgbadger
@@ -1327,7 +1327,7 @@ if ( ($#given_log_files >= 0) && (($queue_size > 1) || ($job_per_file > 1)) ) {
 	}
 
 	# Do not use split method with compressed files
-	if ( ($queue_size > 1) && ($logfile !~ /\.(gz|bz2|zip|xz)$/i) && ($logfile !~ /\Q$journalctl_cmd\E/) ) {
+	if ( ($queue_size > 1) && ($logfile !~ /\.(gz|bz2|zip|xz)$/i) && (!$journalctl_cmd || ($logfile !~ /\Q$journalctl_cmd\E/)) ) {
 		# Create multiple processes to parse one log file by chunks of data
 		my @chunks = &split_logfile($logfile);
 		&logmsg('DEBUG', "The following boundaries will be used to parse file $logfile, " . join('|', @chunks));
-- 
2.50.1
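
Note: the one-line change guards the journalctl regex with !$journalctl_cmd. The following is a minimal, self-contained Perl sketch of why that guard matters; it is not pgbadger code, and the log file name and the empty default for $journalctl_cmd are assumptions for the demo. When $journalctl_cmd is empty or undefined, \Q$journalctl_cmd\E interpolates to an empty pattern, which in Perl either matches anything or silently reuses the last successful pattern, so the "this is not a journalctl pseudo-file" test fails for every real log file and the multiprocess split path is never taken.

    #!/usr/bin/env perl
    # Illustration only, not pgbadger code: the file name and the empty
    # default for $journalctl_cmd below are assumptions for the demo.
    use strict;
    use warnings;

    my $logfile        = '/var/log/postgresql/postgresql-2016-01-18.log';
    my $journalctl_cmd = '';    # journalctl mode not in use

    # Old test: an empty $journalctl_cmd interpolates to an empty pattern,
    # which matches the file name, so "!~" is false and the split path
    # would be skipped for every log file.
    my $old_allows_split = ($logfile !~ /\Q$journalctl_cmd\E/) ? 1 : 0;

    # Patched test: short-circuit on !$journalctl_cmd so the regex is only
    # evaluated when a journalctl command has actually been configured.
    my $new_allows_split = (!$journalctl_cmd || ($logfile !~ /\Q$journalctl_cmd\E/)) ? 1 : 0;

    print "old condition allows split: $old_allows_split\n";    # prints 0 (the bug)
    print "new condition allows split: $new_allows_split\n";    # prints 1 (fixed)

Running the sketch prints 0 for the old condition and 1 for the patched one, matching the behaviour described in the subject: before the fix, the per-chunk multiprocess parsing was disabled whenever journalctl was not in use.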