From: Gilles Darold
Date: Thu, 28 Jul 2016 13:57:13 +0000 (+0200)
Subject: Revert "Fix description of -l | --last-parsed option."
X-Git-Tag: v8.2~11
X-Git-Url: https://granicus.if.org/sourcecode?a=commitdiff_plain;h=19ac8da6fdcfaa16c6951e9a9b779a196b0746d8;p=pgbadger

Revert "Fix description of -l | --last-parsed option."

This reverts commit 1463129df1ff0961249a3ea6a0645f1489033e9b.
---

diff --git a/README b/README
index d91e13e..0de9f84 100644
--- a/README
+++ b/README
@@ -37,10 +37,11 @@ SYNOPSIS
                              run as single process.
     -J | --Jobs number     : number of log file to parse in parallel. Default
                              is 1, run as single process.
-    -l | --last-parsed file: allow you to change the path to the file containing
-                             the last parsed information. Default is LAST_PARSED
-                             in the incremental output directory.
-    -L | --logfile-list file:file containing a list of log file to parse.
+    -l | --last-parsed file: allow incremental log parsing by registering the
+                             last datetime and line parsed. Useful if you want
+                             to watch errors since last run or if you want one
+                             report per day with a log rotated each week.
+    -L | logfile-list file : file containing a list of log file to parse.
     -m | --maxlength size  : maximum length of a query, it will be restricted to
                              the given size. Default: no truncate
     -M | --no-multiline    : do not collect multiline statement to avoid garbage
@@ -165,18 +166,18 @@ SYNOPSIS
              /var/log/postgresql.log
     cat /var/log/postgres.log | pgbadger -
     # Log prefix with stderr log output
-    perl pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
+    pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
              /pglog/postgresql-2012-08-21*
-    perl pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
+    pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
     # Log line prefix with syslog log output
-    perl pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
+    pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
              /pglog/postgresql-2012-08-21*
     # Use my 8 CPUs to parse my 10GB file faster, much faster
-    perl pgbadger -j 8 /pglog/postgresql-9.1-main.log
+    pgbadger -j 8 /pglog/postgresql-9.1-main.log

     Generate Tsung sessions XML file with select queries only:

-    perl pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log
+    pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log

     Reporting errors every week by cron job:

@@ -221,7 +222,7 @@ SYNOPSIS
     pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'

     you don't need to specify any log file at command line, but if you have
-    other PostgreSQL log file to parse, you can add them as usual.
+    others PostgreSQL log files to parse, you can add them as usual.

     To rebuild all incremantal html reports after, proceed as follow:

diff --git a/doc/pgBadger.pod b/doc/pgBadger.pod
index c3e6e94..f53894b 100644
--- a/doc/pgBadger.pod
+++ b/doc/pgBadger.pod
@@ -39,10 +39,11 @@ Options:
                              run as single process.
     -J | --Jobs number     : number of log file to parse in parallel. Default
                              is 1, run as single process.
-    -l | --last-parsed file: allow you to change the path to the file containing
-                             the last parsed information. Default is LAST_PARSED
-                             in the incremental output directory.
-    -L | --logfile-list file:file containing a list of log file to parse.
+    -l | --last-parsed file: allow incremental log parsing by registering the
+                             last datetime and line parsed. Useful if you want
+                             to watch errors since last run or if you want one
+                             report per day with a log rotated each week.
+    -L | logfile-list file : file containing a list of log file to parse.
     -m | --maxlength size  : maximum length of a query, it will be restricted to
                              the given size. Default: no truncate
     -M | --no-multiline    : do not collect multiline statement to avoid garbage
@@ -137,7 +138,7 @@ Options:
                              journalctl -u postgresql-9.5
     --pid-dir dirpath      : set the path of the directory where the pid file
                              will be written to be able to run two pgbadger at
-                             the same time.
+                             the same time. 
     --rebuild              : used to rebuild all html reports in incremental
                              output directories where there is binary data files.
     --pgbouncer-only       : only show pgbouncer related menu in the header.
@@ -166,19 +167,19 @@ Examples:
             /var/log/postgresql.log
    cat /var/log/postgres.log | pgbadger -

    # Log prefix with stderr log output
-    perl pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
+    pgbadger --prefix '%t [%p]: [%l-1] user=%u,db=%d,client=%h'
             /pglog/postgresql-2012-08-21*
-    perl pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
+    pgbadger --prefix '%m %u@%d %p %r %a : ' /pglog/postgresql.log
    # Log line prefix with syslog log output
-    perl pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
+    pgbadger --prefix 'user=%u,db=%d,client=%h,appname=%a'
             /pglog/postgresql-2012-08-21*
    # Use my 8 CPUs to parse my 10GB file faster, much faster
-    perl pgbadger -j 8 /pglog/postgresql-9.1-main.log
+    pgbadger -j 8 /pglog/postgresql-9.1-main.log

Generate Tsung sessions XML file with select queries only:

-    perl pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log
+    pgbadger -S -o sessions.tsung --prefix '%t [%p]: [%l-1] user=%u,db=%d ' /pglog/postgresql-9.1.log

Reporting errors every week by cron job:

@@ -221,8 +222,8 @@ or worst, call it from a remote host:

    pgbadger -r 192.168.1.159 --journalctl 'journalctl -u postgresql-9.5'

-you don't need to specify any log file at command line, but if you have other
-PostgreSQL log file to parse, you can add them as usual.
+you don't need to specify any log file at command line, but if you have others
+PostgreSQL log files to parse, you can add them as usual.

To rebuild all incremantal html reports after, proceed as follow:

diff --git a/pgbadger b/pgbadger
index b3fecec..a1bb71d 100755
--- a/pgbadger
+++ b/pgbadger
@@ -1654,9 +1654,10 @@ Options:
                              run as single process.
     -J | --Jobs number     : number of log file to parse in parallel. Default
                              is 1, run as single process.
-    -l | --last-parsed file: allow you to change the path to the file containing
-                             the last parsed information. Default is LAST_PARSED
-                             in the incremental output directory.
+    -l | --last-parsed file: allow incremental log parsing by registering the
+                             last datetime and line parsed. Useful if you want
+                             to watch errors since last run or if you want one
+                             report per day with a log rotated each week.
     -L | --logfile-list file:file containing a list of log file to parse.
     -m | --maxlength size  : maximum length of a query, it will be restricted to
                              the given size. Default: no truncate
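
The -l | --last-parsed description restored above refers to pgbadger's incremental
parsing mode. As a minimal sketch of how it might be used, e.g. from a weekly cron
job (the state-file path, output path and log file pattern below are illustrative
assumptions, not part of this commit):

    # Illustrative command (hypothetical paths): resume parsing from the
    # datetime and line recorded in the state file, so each run only reports
    # on log entries added since the previous run.
    pgbadger -l /var/lib/pgbadger/LAST_PARSED \
             -o /var/www/pgbadger/weekly_errors.html \
             /var/log/postgresql/postgresql-*.log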