}
@given_log_files = ();
push(@given_log_files, @tmpfilelist);
- $saved_last_line{current_pos}++;
+ $saved_last_line{current_pos}++ if ($saved_last_line{current_pos});
}
} else {
# Disable multi process when using ssh to parse remote log
}
# Do not use split method with compressed files
- if ( ($queue_size > 1) && ($logfile !~ /\.(gz|bz2|zip)/i) ) {
+ if ( ($queue_size > 1) && ($logfile !~ /\.(gz|bz2|zip|xz)$/i) ) {
# Create multiple processes to parse one log file by chunks of data
my @chunks = &split_logfile($logfile);
for (my $i = 0; $i < $#chunks; $i++) {
&logmsg('WARNING', "can't read file $last_parsed.tmp, $!");
&logmsg('HINT', "maybe there's no new entries in your log since last run.");
}
- foreach $incr_date (@build_directories) {
+ foreach $incr_date (sort @build_directories) {
$last_incr_date = $incr_date;
}
last if (($stop_offset > 0) && ($current_offset >= $stop_offset));
}
- $last_line{current_pos} = $current_offset if ($last_parsed);
+ if ($last_parsed) {
+ $last_line{current_pos} = $current_offset;
+ }
}
close $lfile;
$prefix_vars{'t_timestamp'} = $prefix_vars{'t_session_timestamp'};
}
}
+ next if (!$prefix_vars{'t_timestamp'});
# This file has already been parsed
if ($saved_date gt $prefix_vars{'t_timestamp'}) {
close($lfile);
my $totalsize = 0;
if (!$remote_host) {
$totalsize = (stat("$logf"))[7] || 0;
- } elsif ($logf !~ /\.(gz|bz2|zip|xz)/i) {
- &logmsg('DEBUG', "Looking for remote file size using command: $ssh_command \"ls -l $logf\" | awk '{print \$5}'");
+ } elsif ($logf !~ /\.(gz|bz2|zip|xz)$/i) {
+ 				&logmsg('DEBUG', "Looking for remote file size using command: $ssh_command \"ls -l $logf\" | awk '{print \$5}'");
$totalsize = `$ssh_command "ls -l $logf" | awk '{print \$5}'`;
chomp($totalsize);
if (!$totalsize) {
my $iscompressed = 1;
# Open a file handle
- if ($logf !~ /\.(gz|bz2|zip|xz)/i) {
+ if ($logf !~ /\.(gz|bz2|zip|xz)$/i) {
if (!$remote_host) {
open($lfile, $logf) || die "FATAL: cannot read log file $logf. $!\n";
} else {
# Real size of the file is unknown, try to find it
# bz2 does not report real size
$totalsize = 0;
- if ($logf =~ /\.(gz|zip|xz)/i) {
+ if ($logf =~ /\.(gz|zip|xz)$/i) {
my $cmd_file_size = $gzip_uncompress_size;
- if ($logf =~ /\.zip/i) {
+ if ($logf =~ /\.zip$/i) {
$cmd_file_size = $zip_uncompress_size;
- } elsif ($logf =~ /\.xz/i) {
+ } elsif ($logf =~ /\.xz$/i) {
$cmd_file_size = $xz_uncompress_size;
}
$cmd_file_size =~ s/\%f/$logf/g;
if (!$remote_host) {
- &logmsg('DEBUG', "Looking for remote file size using command: $cmd_file_size");
+ &logmsg('DEBUG', "Looking for file size using command: $cmd_file_size");
$totalsize = `$cmd_file_size`;
} else {
&logmsg('DEBUG', "Looking for remote file size using command: $ssh_command $cmd_file_size");
						# If the file is very small, spawning many jobs actually makes parsing slower.
						# TODO: determine an acceptable minimum file size threshold for multi-process parsing.
$queue_size = 0;
- } elsif ($logf =~ /\.(gz|zip)/i) {
+ } elsif ($logf =~ /\.(gz|zip|xz)$/i) {
$totalsize = 0;
my $cmd_file_size = $gzip_uncompress_size;
- if ($logf =~ /\.zip/i) {
+ if ($logf =~ /\.zip$/i) {
$cmd_file_size = $zip_uncompress_size;
+ } elsif ($logf =~ /\.xz$/i) {
+ $cmd_file_size = $xz_uncompress_size;
}
$cmd_file_size =~ s/\%f/$logf/g;
$totalsize = `$cmd_file_size`;
chomp($totalsize);
$queue_size = 0;
- } elsif ($logf =~ /\.bz2/i) {
+ } elsif ($logf =~ /\.bz2$/i) {
$totalsize = 0;
$queue_size = 0;
}
my @chunks = (0);
my $i = 1;
- if ($last_parsed && $saved_last_line{current_pos} && ($#given_log_files == 0)) {
+ if ($last_parsed && $saved_last_line{current_pos} && ($saved_last_line{current_pos} < $totalsize)) {
$chunks[0] = $saved_last_line{current_pos};
$i = $saved_last_line{current_pos};
}