json: automatically scan title when processing json jobs
author    jstebbins <jstebbins.hb@gmail.com>
Sat, 7 Mar 2015 22:25:48 +0000 (22:25 +0000)
committer jstebbins <jstebbins.hb@gmail.com>
Sat, 7 Mar 2015 22:25:48 +0000 (22:25 +0000)
Simplifies the WinGui.

This also changes how jobs are processed.  Creating the sub-jobs for
multiple passes is delayed until after scanning and immediately before
running the job.

Working status reporting has also changed.  Sub-job passes are now identified
in the status with a pass ID, which lets the frontend definitively determine
which pass is in progress.

git-svn-id: svn://svn.handbrake.fr/HandBrake/trunk@6976 b64f7644-9d1e-0410-96f1-a4d463321fa5
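
A minimal sketch (not part of this commit) of how a frontend might turn the new
pass fields into a progress label.  It assumes hb_state_t's working params
expose pass, pass_count and pass_id as used in the GTK changes below, and that
HB_PASS_SUBTITLE marks the subtitle-scan pass; the function name and buffer
handling are illustrative only.

    #include <stdio.h>
    #include "hb.h"   /* hb_state_t, HB_PASS_SUBTITLE */

    static void describe_pass(const hb_state_t *state, char *buf, size_t len)
    {
        int pass       = state->param.working.pass;
        int pass_count = state->param.working.pass_count;
        int pass_id    = state->param.working.pass_id;

        if (pass_count <= 1)
        {
            snprintf(buf, len, "Encoding");
        }
        else if (pass_id == HB_PASS_SUBTITLE)
        {
            /* The subtitle-scan pass is identified directly by its ID rather
             * than inferred from "first pass + subtitle_scan flag". */
            snprintf(buf, len, "pass %d (subtitle scan) of %d", pass, pass_count);
        }
        else
        {
            snprintf(buf, len, "pass %d of %d", pass, pass_count);
        }
    }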

23 files changed:
gtk/src/callbacks.c
gtk/src/hb-backend.c
gtk/src/hb-backend.h
libhb/batch.c
libhb/common.c
libhb/common.h
libhb/encavcodec.c
libhb/enctheora.c
libhb/encx264.c
libhb/encx265.c
libhb/hb.c
libhb/hb_json.c
libhb/hb_json.h
libhb/internal.h
libhb/muxavformat.c
libhb/muxcommon.c
libhb/reader.c
libhb/scan.c
libhb/stream.c
libhb/sync.c
libhb/work.c
macosx/HBQueueController.m
test/test.c

index 2fc2e5a2d022458ed803d6bc79f3f91bf32a006f..c086fcd473c346040aa2c3c28982fcd817d85089 100644 (file)
@@ -2702,6 +2702,54 @@ find_queue_job(GhbValue *queue, gint unique_id, GhbValue **job)
     return -1;
 }
 
+static void
+start_new_log(signal_user_data_t *ud, GhbValue *js)
+{
+    time_t  _now;
+    struct tm *now;
+    gchar *log_path, *pos, *destname, *basename, *dest_dir;
+
+    _now = time(NULL);
+    now = localtime(&_now);
+    destname = ghb_settings_get_string(js, "destination");
+    basename = g_path_get_basename(destname);
+    if (ghb_settings_get_boolean(ud->prefs, "EncodeLogLocation"))
+    {
+        dest_dir = g_path_get_dirname (destname);
+    }
+    else
+    {
+        dest_dir = ghb_get_user_config_dir("EncodeLogs");
+    }
+    g_free(destname);
+    pos = g_strrstr( basename, "." );
+    if (pos != NULL)
+    {
+        *pos = 0;
+    }
+    log_path = g_strdup_printf("%s/%s %d-%02d-%02d %02d-%02d-%02d.log",
+        dest_dir,
+        basename,
+        now->tm_year + 1900, now->tm_mon + 1, now->tm_mday,
+        now->tm_hour, now->tm_min, now->tm_sec);
+    g_free(basename);
+    g_free(dest_dir);
+    if (ud->job_activity_log)
+        g_io_channel_unref(ud->job_activity_log);
+    ud->job_activity_log = g_io_channel_new_file (log_path, "w", NULL);
+    if (ud->job_activity_log)
+    {
+        gchar *ver_str;
+
+        ver_str = g_strdup_printf("Handbrake Version: %s (%d)\n",
+                                    hb_get_version(NULL), hb_get_build(NULL));
+        g_io_channel_write_chars (ud->job_activity_log, ver_str,
+                                    -1, NULL, NULL);
+        g_free(ver_str);
+    }
+    g_free(log_path);
+}
+
 static void
 submit_job(signal_user_data_t *ud, GhbValue *settings)
 {
@@ -2723,7 +2771,8 @@ submit_job(signal_user_data_t *ud, GhbValue *settings)
 
     ghb_settings_set_int(settings, "job_unique_id", unique_id);
     ghb_settings_set_int(settings, "job_status", GHB_QUEUE_RUNNING);
-    ghb_add_job (settings, unique_id);
+    start_new_log(ud, settings);
+    ghb_add_job(settings, unique_id);
     ghb_start_queue();
 
     // Start queue activity spinner
@@ -2789,61 +2838,6 @@ prune_logs(signal_user_data_t *ud)
     ghb_preview_cleanup(ud);
 }
 
-static void
-queue_scan(signal_user_data_t *ud, GhbValue *js)
-{
-    gchar *path;
-    gint title_id;
-    time_t  _now;
-    struct tm *now;
-    gchar *log_path, *pos, *destname, *basename, *dest_dir;
-
-    _now = time(NULL);
-    now = localtime(&_now);
-    destname = ghb_settings_get_string(js, "destination");
-    basename = g_path_get_basename(destname);
-    if (ghb_settings_get_boolean(ud->prefs, "EncodeLogLocation"))
-    {
-        dest_dir = g_path_get_dirname (destname);
-    }
-    else
-    {
-        dest_dir = ghb_get_user_config_dir("EncodeLogs");
-    }
-    g_free(destname);
-    pos = g_strrstr( basename, "." );
-    if (pos != NULL)
-    {
-        *pos = 0;
-    }
-    log_path = g_strdup_printf("%s/%s %d-%02d-%02d %02d-%02d-%02d.log",
-        dest_dir,
-        basename,
-        now->tm_year + 1900, now->tm_mon + 1, now->tm_mday,
-        now->tm_hour, now->tm_min, now->tm_sec);
-    g_free(basename);
-    g_free(dest_dir);
-    if (ud->job_activity_log)
-        g_io_channel_unref(ud->job_activity_log);
-    ud->job_activity_log = g_io_channel_new_file (log_path, "w", NULL);
-    if (ud->job_activity_log)
-    {
-        gchar *ver_str;
-
-        ver_str = g_strdup_printf("Handbrake Version: %s (%d)\n",
-                                    hb_get_version(NULL), hb_get_build(NULL));
-        g_io_channel_write_chars (ud->job_activity_log, ver_str,
-                                    -1, NULL, NULL);
-        g_free(ver_str);
-    }
-    g_free(log_path);
-
-    path = ghb_settings_get_string( js, "source");
-    title_id = ghb_settings_get_int(js, "title");
-    ghb_backend_queue_scan(path, title_id);
-    g_free(path);
-}
-
 static gint
 queue_pending_count(GhbValue *queue)
 {
@@ -2901,7 +2895,7 @@ ghb_start_next_job(signal_user_data_t *ud)
         if (status == GHB_QUEUE_PENDING)
         {
             ghb_inhibit_gsm(ud);
-            queue_scan(ud, js);
+            submit_job(ud, js);
             ghb_update_pending(ud);
 
             // Start queue activity spinner
@@ -2936,14 +2930,9 @@ working_status_string(signal_user_data_t *ud, ghb_instance_status_t *status)
     gint qcount;
     gint index;
     GhbValue *js;
-    gboolean subtitle_scan = FALSE;
 
     qcount = ghb_array_len(ud->queue);
     index = find_queue_job(ud->queue, status->unique_id, &js);
-    if (js != NULL)
-    {
-        subtitle_scan = ghb_settings_get_boolean(js, "subtitle_scan");
-    }
     if (qcount > 1)
     {
         job_str = g_strdup_printf(_("job %d of %d, "), index+1, qcount);
@@ -2952,17 +2941,17 @@ working_status_string(signal_user_data_t *ud, ghb_instance_status_t *status)
     {
         job_str = g_strdup("");
     }
-    if (status->job_count > 1)
+    if (status->pass_count > 1)
     {
-        if (status->job_cur == 1 && subtitle_scan)
+        if (status->pass_id == HB_PASS_SUBTITLE)
         {
             task_str = g_strdup_printf(_("pass %d (subtitle scan) of %d, "),
-                status->job_cur, status->job_count);
+                status->pass, status->pass_count);
         }
         else
         {
             task_str = g_strdup_printf(_("pass %d of %d, "),
-                status->job_cur, status->job_count);
+                status->pass, status->pass_count);
         }
     }
     else
@@ -3215,9 +3204,6 @@ ghb_backend_events(signal_user_data_t *ud)
     else if (status.queue.state & GHB_STATE_SCANDONE)
     {
         ghb_clear_queue_state(GHB_STATE_SCANDONE);
-        usleep(2000000);
-        submit_job(ud, ud->current_job);
-        ghb_update_pending(ud);
     }
     else if (status.queue.state & GHB_STATE_PAUSED)
     {
@@ -3311,19 +3297,19 @@ ghb_backend_events(signal_user_data_t *ud)
         gtk_label_set_text (work_status, _("Muxing: This may take a while..."));
     }
 
-    if (status.scan.state & GHB_STATE_WORKING)
+    if (status.live.state & GHB_STATE_WORKING)
     {
         GtkProgressBar *live_progress;
         live_progress = GTK_PROGRESS_BAR(
             GHB_WIDGET(ud->builder, "live_encode_progress"));
-        status_str = working_status_string(ud, &status.scan);
+        status_str = working_status_string(ud, &status.live);
         gtk_progress_bar_set_text (live_progress, status_str);
-        gtk_progress_bar_set_fraction (live_progress, status.scan.progress);
+        gtk_progress_bar_set_fraction (live_progress, status.live.progress);
         g_free(status_str);
     }
-    if (status.scan.state & GHB_STATE_WORKDONE)
+    if (status.live.state & GHB_STATE_WORKDONE)
     {
-        switch( status.scan.error )
+        switch( status.live.error )
         {
             case GHB_ERROR_NONE:
             {
@@ -3334,7 +3320,7 @@ ghb_backend_events(signal_user_data_t *ud)
                 ghb_live_encode_done(ud, FALSE);
             } break;
         }
-        ghb_clear_scan_state(GHB_STATE_WORKDONE);
+        ghb_clear_live_state(GHB_STATE_WORKDONE);
     }
 }
 
index cc206b0c85dc5f0835cce7bffce0bd67d8fbc00c..ee09d247e4874f8eb493ead8d2086046e43b1cc0 100644 (file)
@@ -38,6 +38,7 @@
 #include "preview.h"
 #include "values.h"
 #include "lang.h"
+#include "jansson.h"
 
 typedef struct
 {
@@ -965,6 +966,7 @@ lookup_audio_lang_option(const GhbValue *glang)
 // Handle for libhb.  Gets set by ghb_backend_init()
 static hb_handle_t * h_scan = NULL;
 static hb_handle_t * h_queue = NULL;
+static hb_handle_t * h_live = NULL;
 
 extern void hb_get_temporary_directory(char path[512]);
 
@@ -2697,10 +2699,9 @@ ghb_build_advanced_opts_string(GhbValue *settings)
     }
 }
 
-void ghb_set_video_encoder_opts(hb_job_t *job, GhbValue *js)
+void ghb_set_video_encoder_opts(json_t *dict, GhbValue *js)
 {
     gint vcodec = ghb_settings_video_encoder_codec(js, "VideoEncoder");
-
     switch (vcodec)
     {
         case HB_VCODEC_X265:
@@ -2709,18 +2710,19 @@ void ghb_set_video_encoder_opts(hb_job_t *job, GhbValue *js)
             if (vcodec == HB_VCODEC_X264 &&
                 ghb_settings_get_boolean(js, "x264UseAdvancedOptions"))
             {
-                char *opts = ghb_settings_get_string(js, "x264Option");
-                hb_job_set_encoder_options(job, opts);
-                g_free(opts);
+                const char *opts;
+                opts = ghb_settings_get_const_string(js, "x264Option");
+                json_object_set_new(dict, "Options", json_string(opts));
             }
             else
             {
+                const char *preset, *tune, *profile, *level, *opts;
                 GString *str = g_string_new("");
-                char *preset = ghb_settings_get_string(js, "VideoPreset");
-                char *tune = ghb_settings_get_string(js, "VideoTune");
-                char *profile = ghb_settings_get_string(js, "VideoProfile");
-                char *level = ghb_settings_get_string(js, "VideoLevel");
-                char *opts = ghb_settings_get_string(js, "VideoOptionExtra");
+                preset = ghb_settings_get_const_string(js, "VideoPreset");
+                tune = ghb_settings_get_const_string(js, "VideoTune");
+                profile = ghb_settings_get_const_string(js, "VideoProfile");
+                level = ghb_settings_get_const_string(js, "VideoLevel");
+                opts = ghb_settings_get_const_string(js, "VideoOptionExtra");
                 char *tunes;
 
                 g_string_append_printf(str, "%s", tune);
@@ -2737,24 +2739,17 @@ void ghb_set_video_encoder_opts(hb_job_t *job, GhbValue *js)
                 }
                 tunes = g_string_free(str, FALSE);
 
-                hb_job_set_encoder_preset(job, preset);
-
+                if (preset != NULL)
+                    json_object_set_new(dict, "Preset", json_string(preset));
                 if (tunes != NULL && strcasecmp(tune, "none"))
-                    hb_job_set_encoder_tune(job, tunes);
-
+                    json_object_set_new(dict, "Tune", json_string(tunes));
                 if (profile != NULL && strcasecmp(profile, "auto"))
-                    hb_job_set_encoder_profile(job, profile);
-
+                    json_object_set_new(dict, "Profile", json_string(profile));
                 if (level != NULL && strcasecmp(level, "auto"))
-                    hb_job_set_encoder_level(job, level);
-
-                hb_job_set_encoder_options(job, opts);
+                    json_object_set_new(dict, "Level", json_string(level));
+                if (opts != NULL)
+                    json_object_set_new(dict, "Options", json_string(opts));
 
-                g_free(preset);
-                g_free(tune);
-                g_free(profile);
-                g_free(level);
-                g_free(opts);
                 g_free(tunes);
             }
         } break;
@@ -2763,12 +2758,12 @@ void ghb_set_video_encoder_opts(hb_job_t *job, GhbValue *js)
         case HB_VCODEC_FFMPEG_MPEG4:
         case HB_VCODEC_FFMPEG_VP8:
         {
-            gchar *opts = ghb_settings_get_string(js, "VideoOptionExtra");
+            const char *opts;
+            opts = ghb_settings_get_const_string(js, "VideoOptionExtra");
             if (opts != NULL && opts[0])
             {
-                hb_job_set_encoder_options(job, opts);
+                json_object_set_new(dict, "Options", json_string(opts));
             }
-            g_free(opts);
         } break;
 
         case HB_VCODEC_THEORA:
@@ -3144,11 +3139,13 @@ ghb_backend_init(gint debug)
     /* Init libhb */
     h_scan = hb_init( debug, 0 );
     h_queue = hb_init( debug, 0 );
+    h_live = hb_init( debug, 0 );
 }
 
 void
 ghb_backend_close()
 {
+    hb_close(&h_live);
     hb_close(&h_queue);
     hb_close(&h_scan);
     hb_global_close();
@@ -3199,6 +3196,12 @@ ghb_clear_scan_state(gint state)
     hb_status.scan.state &= ~state;
 }
 
+void
+ghb_clear_live_state(gint state)
+{
+    hb_status.live.state &= ~state;
+}
+
 void
 ghb_clear_queue_state(gint state)
 {
@@ -3223,186 +3226,116 @@ ghb_get_status(ghb_status_t *status)
     memcpy(status, &hb_status, sizeof(ghb_status_t));
 }
 
-void
-ghb_track_status()
+static void
+update_status(hb_state_t *state, ghb_instance_status_t *status)
 {
-    hb_state_t s_scan;
-    hb_state_t s_queue;
-
-    if (h_scan == NULL) return;
-    hb_get_state( h_scan, &s_scan );
-    switch( s_scan.state )
+    switch( state->state )
     {
-#define p s_scan.param.scanning
+#define p state->param.scanning
         case HB_STATE_SCANNING:
         {
-            hb_status.scan.state |= GHB_STATE_SCANNING;
-            hb_status.scan.title_count = p.title_count;
-            hb_status.scan.title_cur = p.title_cur;
-            hb_status.scan.preview_count = p.preview_count;
-            hb_status.scan.preview_cur = p.preview_cur;
-            hb_status.scan.progress = p.progress;
+            status->state |= GHB_STATE_SCANNING;
+            status->title_count = p.title_count;
+            status->title_cur = p.title_cur;
+            status->preview_count = p.preview_count;
+            status->preview_cur = p.preview_cur;
+            status->progress = p.progress;
         } break;
 #undef p
 
         case HB_STATE_SCANDONE:
         {
-            hb_status.scan.state &= ~GHB_STATE_SCANNING;
-            hb_status.scan.state |= GHB_STATE_SCANDONE;
+            status->state &= ~GHB_STATE_SCANNING;
+            status->state |= GHB_STATE_SCANDONE;
         } break;
 
-#define p s_scan.param.working
+#define p state->param.working
         case HB_STATE_WORKING:
-            hb_status.scan.state |= GHB_STATE_WORKING;
-            hb_status.scan.state &= ~GHB_STATE_PAUSED;
-            hb_status.scan.job_cur = p.job_cur;
-            hb_status.scan.job_count = p.job_count;
-            hb_status.scan.progress = p.progress;
-            hb_status.scan.rate_cur = p.rate_cur;
-            hb_status.scan.rate_avg = p.rate_avg;
-            hb_status.scan.hours = p.hours;
-            hb_status.scan.minutes = p.minutes;
-            hb_status.scan.seconds = p.seconds;
-            hb_status.scan.unique_id = p.sequence_id & 0xFFFFFF;
-            break;
-#undef p
-
-        case HB_STATE_PAUSED:
-            hb_status.scan.state |= GHB_STATE_PAUSED;
-            break;
-
-        case HB_STATE_MUXING:
-        {
-            hb_status.scan.state |= GHB_STATE_MUXING;
-        } break;
-
-#define p s_scan.param.workdone
-        case HB_STATE_WORKDONE:
-        {
-            hb_job_t *job;
-
-            hb_status.scan.state |= GHB_STATE_WORKDONE;
-            hb_status.scan.state &= ~GHB_STATE_MUXING;
-            hb_status.scan.state &= ~GHB_STATE_PAUSED;
-            hb_status.scan.state &= ~GHB_STATE_WORKING;
-            switch (p.error)
+            if (status->state & GHB_STATE_SCANNING)
             {
-            case HB_ERROR_NONE:
-                hb_status.scan.error = GHB_ERROR_NONE;
-                break;
-            case HB_ERROR_CANCELED:
-                hb_status.scan.error = GHB_ERROR_CANCELED;
-                break;
-            default:
-                hb_status.scan.error = GHB_ERROR_FAIL;
-                break;
+                status->state &= ~GHB_STATE_SCANNING;
+                status->state |= GHB_STATE_SCANDONE;
             }
-            // Delete all remaining jobs of this encode.
-            // An encode can be composed of multiple associated jobs.
-            // When a job is stopped, libhb removes it from the job list,
-            // but does not remove other jobs that may be associated with it.
-            // Associated jobs are taged in the sequence id.
-            while ((job = hb_job(h_scan, 0)) != NULL)
-                hb_rem( h_scan, job );
-        } break;
-#undef p
-    }
-    hb_get_state( h_queue, &s_queue );
-    switch( s_queue.state )
-    {
-#define p s_queue.param.scanning
-        case HB_STATE_SCANNING:
-        {
-            hb_status.queue.state |= GHB_STATE_SCANNING;
-            hb_status.queue.title_count = p.title_count;
-            hb_status.queue.title_cur = p.title_cur;
-            hb_status.queue.preview_count = p.preview_count;
-            hb_status.queue.preview_cur = p.preview_cur;
-            hb_status.queue.progress = p.progress;
-        } break;
-#undef p
-
-        case HB_STATE_SCANDONE:
-        {
-            hb_status.queue.state &= ~GHB_STATE_SCANNING;
-            hb_status.queue.state |= GHB_STATE_SCANDONE;
-        } break;
-
-#define p s_queue.param.working
-        case HB_STATE_WORKING:
-            hb_status.queue.state |= GHB_STATE_WORKING;
-            hb_status.queue.state &= ~GHB_STATE_PAUSED;
-            hb_status.queue.state &= ~GHB_STATE_SEARCHING;
-            hb_status.queue.job_cur = p.job_cur;
-            hb_status.queue.job_count = p.job_count;
-            hb_status.queue.progress = p.progress;
-            hb_status.queue.rate_cur = p.rate_cur;
-            hb_status.queue.rate_avg = p.rate_avg;
-            hb_status.queue.hours = p.hours;
-            hb_status.queue.minutes = p.minutes;
-            hb_status.queue.seconds = p.seconds;
-            hb_status.queue.unique_id = p.sequence_id & 0xFFFFFF;
+            status->state |= GHB_STATE_WORKING;
+            status->state &= ~GHB_STATE_PAUSED;
+            status->state &= ~GHB_STATE_SEARCHING;
+            status->pass = p.pass;
+            status->pass_count = p.pass_count;
+            status->pass_id = p.pass_id;
+            status->progress = p.progress;
+            status->rate_cur = p.rate_cur;
+            status->rate_avg = p.rate_avg;
+            status->hours = p.hours;
+            status->minutes = p.minutes;
+            status->seconds = p.seconds;
+            status->unique_id = p.sequence_id & 0xFFFFFF;
             break;
 
         case HB_STATE_SEARCHING:
-            hb_status.queue.state |= GHB_STATE_SEARCHING;
-            hb_status.queue.state &= ~GHB_STATE_WORKING;
-            hb_status.queue.state &= ~GHB_STATE_PAUSED;
-            hb_status.queue.job_cur = p.job_cur;
-            hb_status.queue.job_count = p.job_count;
-            hb_status.queue.progress = p.progress;
-            hb_status.queue.rate_cur = p.rate_cur;
-            hb_status.queue.rate_avg = p.rate_avg;
-            hb_status.queue.hours = p.hours;
-            hb_status.queue.minutes = p.minutes;
-            hb_status.queue.seconds = p.seconds;
-            hb_status.queue.unique_id = p.sequence_id & 0xFFFFFF;
+            status->state |= GHB_STATE_SEARCHING;
+            status->state &= ~GHB_STATE_WORKING;
+            status->state &= ~GHB_STATE_PAUSED;
+            status->pass = p.pass;
+            status->pass_count = p.pass_count;
+            status->pass_id = p.pass_id;
+            status->progress = p.progress;
+            status->rate_cur = p.rate_cur;
+            status->rate_avg = p.rate_avg;
+            status->hours = p.hours;
+            status->minutes = p.minutes;
+            status->seconds = p.seconds;
+            status->unique_id = p.sequence_id & 0xFFFFFF;
             break;
 #undef p
 
         case HB_STATE_PAUSED:
-            hb_status.queue.state |= GHB_STATE_PAUSED;
+            status->state |= GHB_STATE_PAUSED;
             break;
 
         case HB_STATE_MUXING:
         {
-            hb_status.queue.state |= GHB_STATE_MUXING;
+            status->state |= GHB_STATE_MUXING;
         } break;
 
-#define p s_queue.param.workdone
+#define p state->param.workdone
         case HB_STATE_WORKDONE:
         {
-            hb_job_t *job;
-
-            hb_status.queue.state |= GHB_STATE_WORKDONE;
-            hb_status.queue.state &= ~GHB_STATE_MUXING;
-            hb_status.queue.state &= ~GHB_STATE_PAUSED;
-            hb_status.queue.state &= ~GHB_STATE_WORKING;
-            hb_status.queue.state &= ~GHB_STATE_SEARCHING;
+            status->state |= GHB_STATE_WORKDONE;
+            status->state &= ~GHB_STATE_MUXING;
+            status->state &= ~GHB_STATE_PAUSED;
+            status->state &= ~GHB_STATE_WORKING;
+            status->state &= ~GHB_STATE_SEARCHING;
             switch (p.error)
             {
             case HB_ERROR_NONE:
-                hb_status.queue.error = GHB_ERROR_NONE;
+                status->error = GHB_ERROR_NONE;
                 break;
             case HB_ERROR_CANCELED:
-                hb_status.queue.error = GHB_ERROR_CANCELED;
+                status->error = GHB_ERROR_CANCELED;
                 break;
             default:
-                hb_status.queue.error = GHB_ERROR_FAIL;
+                status->error = GHB_ERROR_FAIL;
                 break;
             }
-            // Delete all remaining jobs of this encode.
-            // An encode can be composed of multiple associated jobs.
-            // When a job is stopped, libhb removes it from the job list,
-            // but does not remove other jobs that may be associated with it.
-            // Associated jobs are taged in the sequence id.
-            while ((job = hb_job(h_queue, 0)) != NULL)
-                hb_rem( h_queue, job );
         } break;
 #undef p
     }
 }
 
+void
+ghb_track_status()
+{
+    hb_state_t state;
+
+    if (h_scan == NULL) return;
+    hb_get_state( h_scan, &state );
+    update_status(&state, &hb_status.scan);
+    hb_get_state( h_queue, &state );
+    update_status(&state, &hb_status.queue);
+    hb_get_state( h_live, &state );
+    update_status(&state, &hb_status.live);
+}
+
 hb_audio_config_t*
 ghb_get_audio_info(const hb_title_t *title, gint track)
 {
@@ -4189,135 +4122,313 @@ ghb_validate_audio(GhbValue *settings, GtkWindow *parent)
 }
 
 static void
-add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
+add_job(hb_handle_t *h, GhbValue *js, gint unique_id)
 {
-    hb_list_t  * list;
-    const hb_title_t * title;
-    hb_job_t   * job;
-    hb_filter_object_t * filter;
-    gchar *filter_str;
-    gchar *dest_str = NULL;
-    GhbValue *prefs;
-
-    g_debug("add_job()\n");
-    if (h == NULL) return;
-    list = hb_get_titles( h );
-    if( !hb_list_count( list ) )
-    {
-        /* No valid title, stop right there */
-        return;
-    }
+    json_t * dict;
+    json_error_t error;
+    int ii, count;
 
-    title = hb_list_item( list, titleindex );
-    if (title == NULL) return;
-
-    /* Set job settings */
-    job = hb_job_init( (hb_title_t*)title );
-    if (job == NULL) return;
-
-    prefs = ghb_settings_get_value(js, "Preferences");
-    job->angle = ghb_settings_get_int(js, "angle");
-    job->start_at_preview = ghb_settings_get_int(js, "start_frame") + 1;
-    if (job->start_at_preview)
-    {
-        job->seek_points = ghb_settings_get_int(prefs, "preview_count");
-        job->pts_to_stop = ghb_settings_get_int(prefs, "live_duration") * 90000LL;
-    }
+    // Assumes that the UI has reduced geometry settings to only the
+    // necessary PAR value
 
-    const char *mux_id;
+    const char *mux_name;
     const hb_container_t *mux;
+    int mux_id;
 
-    mux_id = ghb_settings_get_const_string(js, "FileFormat");
-    mux = ghb_lookup_container_by_name(mux_id);
+    mux_name = ghb_settings_get_const_string(js, "FileFormat");
+    mux = ghb_lookup_container_by_name(mux_name);
 
-    job->mux = mux->format;
-    if (job->mux & HB_MUX_MASK_MP4)
+    mux_id = mux->format;
+
+    int p_to_p = -1, seek_points, chapter_markers = 0;
+    int64_t range_start, range_stop;
+    range_start = ghb_settings_get_int(js, "start_frame") + 1;
+    if (range_start)
     {
-        job->mp4_optimize = ghb_settings_get_boolean(js, "Mp4HttpOptimize");
+        GhbValue *prefs = ghb_settings_get_value(js, "Preferences");
+        seek_points = ghb_settings_get_int(prefs, "preview_count");
+        range_stop = ghb_settings_get_int(prefs, "live_duration") * 90000LL;
     }
     else
     {
-        job->mp4_optimize = FALSE;
+        chapter_markers = ghb_settings_get_boolean(js, "ChapterMarkers");
+        p_to_p = ghb_settings_combo_int(js, "PtoPType");
+        switch (p_to_p)
+        {
+            default:
+            case 0: // Chapter range
+            {
+                range_start = ghb_settings_get_int(js, "start_point");
+                range_stop  = ghb_settings_get_int(js, "end_point");
+                if (range_start == range_stop)
+                    chapter_markers = 0;
+            } break;
+            case 1: // PTS range
+            {
+                double start, end;
+                start = ghb_settings_get_double(js, "start_point");
+                end   = ghb_settings_get_double(js, "end_point");
+                range_start = (int64_t)start * 90000;
+                range_stop  = (int64_t)end   * 90000 - range_start;
+            } break;
+            case 2: // Frame range
+            {
+                range_start = ghb_settings_get_int(js, "start_point") - 1;
+                range_stop  = ghb_settings_get_int(js, "end_point")   - 1 -
+                              range_start;
+            } break;
+        }
     }
-    if (!job->start_at_preview)
-    {
-        gint num_chapters = hb_list_count(title->list_chapter);
-        double duration = title->duration / 90000;
-        job->chapter_markers = FALSE;
-        job->chapter_start = 1;
-        job->chapter_end = num_chapters;
 
-        if (ghb_settings_combo_int(js, "PtoPType") == 0)
+    const char *path = ghb_settings_get_const_string(js, "source");
+    int title_id = ghb_settings_get_int(js, "title");
+
+    int angle = ghb_settings_get_int(js, "angle");
+
+    hb_rational_t par;
+    par.num = ghb_settings_get_int(js, "PicturePARWidth");
+    par.den = ghb_settings_get_int(js, "PicturePARHeight");
+
+    int vcodec, acodec_copy_mask, acodec_fallback, grayscale;
+    vcodec = ghb_settings_video_encoder_codec(js, "VideoEncoder");
+    acodec_copy_mask = ghb_get_copy_mask(js);
+    acodec_fallback = ghb_settings_audio_encoder_codec(js, "AudioEncoderFallback");
+    grayscale   = ghb_settings_get_boolean(js, "VideoGrayScale");
+
+    dict = json_pack_ex(&error, 0,
+    "{"
+    // SequenceID
+    "s:o,"
+    // Destination {Mux, ChapterMarkers, ChapterList}
+    "s:{s:o, s:o, s[]},"
+    // Source {Path, Title, Angle}
+    "s:{s:o, s:o, s:o,},"
+    // PAR {Num, Den}
+    "s:{s:o, s:o},"
+    // Video {Codec}
+    "s:{s:o},"
+    // Audio {CopyMask, FallbackEncoder, AudioList []}
+    "s:{s:o, s:o, s:[]},"
+    // Subtitles {Search {}, SubtitleList []}
+    "s:{s:{}, s:[]},"
+    // MetaData
+    "s:{},"
+    // Filters {Grayscale, FilterList []}
+    "s:{s:o, s:[]}"
+    "}",
+        "SequenceID",           json_integer(unique_id),
+        "Destination",
+            "Mux",              json_integer(mux_id),
+            "ChapterMarkers",   json_boolean(chapter_markers),
+            "ChapterList",
+        "Source",
+            "Path",             json_string(path),
+            "Title",            json_integer(title_id),
+            "Angle",            json_integer(angle),
+        "PAR",
+            "Num",              json_integer(par.num),
+            "Den",              json_integer(par.den),
+        "Video",
+            "Codec",            json_integer(vcodec),
+        "Audio",
+            "CopyMask",         json_integer(acodec_copy_mask),
+            "FallbackEncoder",  json_integer(acodec_fallback),
+            "AudioList",
+        "Subtitle",
+            "Search",
+            "SubtitleList",
+        "MetaData",
+        "Filter",
+            "Grayscale",        json_boolean(grayscale),
+            "FilterList"
+    );
+    if (dict == NULL)
+    {
+        printf("json pack job failure: %s\n", error.text);
+        return;
+    }
+    const char *dest = ghb_settings_get_const_string(js, "destination");
+    json_t *dest_dict = json_object_get(dict, "Destination");
+    if (dest != NULL)
+    {
+        json_object_set_new(dest_dict, "File", json_string(dest));
+    }
+    if (mux_id & HB_MUX_MASK_MP4)
+    {
+        int mp4_optimize, ipod_atom = 0;
+        mp4_optimize = ghb_settings_get_boolean(js, "Mp4HttpOptimize");
+        if (vcodec == HB_VCODEC_X264)
         {
-            gint start, end;
-            start = ghb_settings_get_int(js, "start_point");
-            end = ghb_settings_get_int(js, "end_point");
-            job->chapter_start = MIN( num_chapters, start );
-            job->chapter_end   = MAX( job->chapter_start, end );
-
+            ipod_atom = ghb_settings_get_boolean(js, "Mp4iPodCompatible");
         }
-        if (ghb_settings_combo_int(js, "PtoPType") == 1)
+        json_t *mp4_dict;
+        mp4_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+            "Mp4Optimize",      json_boolean(mp4_optimize),
+            "IpodAtom",         json_boolean(ipod_atom));
+        if (mp4_dict == NULL)
         {
-            double start, end;
-            start = ghb_settings_get_double(js, "start_point");
-            end = ghb_settings_get_double(js, "end_point");
-            job->pts_to_start = (int64_t)(MIN(duration, start) * 90000);
-            job->pts_to_stop = (int64_t)(MAX(start, end) * 90000) -
-                                        job->pts_to_start;
+            printf("json pack mp4 failure: %s\n", error.text);
+            return;
         }
-        if (ghb_settings_combo_int(js, "PtoPType") == 2)
+        json_object_set_new(dest_dict, "Mp4Options", mp4_dict);
+    }
+    json_t *source_dict = json_object_get(dict, "Source");
+    json_t *range_dict;
+    switch (p_to_p)
+    {
+        case -1: // Live preview range
         {
-            gint start, end;
-            start = ghb_settings_get_int(js, "start_point");
-            end = ghb_settings_get_int(js, "end_point");
-            gint64 max_frames;
-            max_frames = (gint64)duration * title->vrate.num / title->vrate.den;
-            job->frame_to_start = (int64_t)MIN(max_frames-1, start-1);
-            job->frame_to_stop = (int64_t)MAX(start, end-1) -
-                                 job->frame_to_start;
-        }
-        if (job->chapter_start != job->chapter_end)
+            range_dict = json_pack_ex(&error, 0, "{s:o, s:o, s:o}",
+                "StartAtPreview",   json_integer(range_start),
+                "PtsToStop",        json_integer(range_stop),
+                "SeekPoints",       json_integer(seek_points));
+            if (range_dict == NULL)
+            {
+                printf("json pack live range failure: %s\n", error.text);
+                return;
+            }
+        } break;
+        default:
+        case 0: // Chapter range
         {
-            job->chapter_markers = ghb_settings_get_boolean(js, "ChapterMarkers");
-        }
-        if (job->chapter_start == job->chapter_end)
-            job->chapter_markers = 0;
-        if ( job->chapter_markers )
+            range_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                "ChapterStart", json_integer(range_start),
+                "ChapterEnd",   json_integer(range_stop));
+            if (range_dict == NULL)
+            {
+                printf("json pack chapter range failure: %s\n", error.text);
+                return;
+            }
+        } break;
+        case 1: // PTS range
+        {
+            range_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                "PtsToStart",   json_integer(range_start),
+                "PtsToStop",    json_integer(range_stop));
+            if (range_dict == NULL)
+            {
+                printf("json pack pts range failure: %s\n", error.text);
+                return;
+            }
+        } break;
+        case 2: // Frame range
         {
-            GhbValue *chapters;
-            GhbValue *chapter;
-            gint chap;
-            gint count;
-
-            chapters = ghb_settings_get_value(js, "chapter_list");
-            count = ghb_array_len(chapters);
-            for(chap = 0; chap < count; chap++)
+            range_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                "FrameToStart", json_integer(range_start),
+                "FrameToStop",  json_integer(range_stop));
+            if (range_dict == NULL)
             {
-                hb_chapter_t * chapter_s;
-                gchar *name;
+                printf("json pack frame range failure: %s\n", error.text);
+                return;
+            }
+        } break;
+    }
+    json_object_set_new(source_dict, "Range", range_dict);
 
-                name = NULL;
-                chapter = ghb_array_get_nth(chapters, chap);
-                name = ghb_value_string(chapter);
-                if (name == NULL)
-                {
-                    name = g_strdup_printf (_("Chapter %2d"), chap+1);
-                }
-                chapter_s = hb_list_item( job->list_chapter, chap);
-                hb_chapter_set_title(chapter_s, name);
-                g_free(name);
+    json_t *video_dict = json_object_get(dict, "Video");
+    if (ghb_settings_get_boolean(js, "vquality_type_constant"))
+    {
+        double vquality = ghb_settings_get_double(js, "VideoQualitySlider");
+        json_object_set_new(video_dict, "Quality", json_real(vquality));
+    }
+    else if (ghb_settings_get_boolean(js, "vquality_type_bitrate"))
+    {
+        int vbitrate, twopass, fastfirstpass;
+        vbitrate = ghb_settings_get_int(js, "VideoAvgBitrate");
+        twopass = ghb_settings_get_boolean(js, "VideoTwoPass");
+        fastfirstpass = ghb_settings_get_boolean(js, "VideoTurboTwoPass");
+        json_object_set_new(video_dict, "Bitrate", json_integer(vbitrate));
+        json_object_set_new(video_dict, "TwoPass", json_boolean(twopass));
+        json_object_set_new(video_dict, "Turbo", json_boolean(fastfirstpass));
+    }
+    ghb_set_video_encoder_opts(video_dict, js);
+
+    json_t *meta_dict = json_object_get(dict, "MetaData");
+    const char * meta;
+
+    meta = ghb_settings_get_const_string(js, "MetaName");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "Name", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaArtist");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "Artist", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaAlbumArtist");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "AlbumArtist", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaReleaseDate");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "ReleaseDate", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaComment");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "Comment", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaGenre");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "Genre", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaDescription");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "Description", json_string(meta));
+    }
+    meta = ghb_settings_get_const_string(js, "MetaLongDescription");
+    if (meta && *meta)
+    {
+        json_object_set_new(meta_dict, "LongDescription", json_string(meta));
+    }
+
+    // process chapter list
+    if (chapter_markers)
+    {
+        json_t *chapter_list = json_object_get(dest_dict, "ChapterList");
+        GhbValue *chapters;
+        GhbValue *chapter;
+        gint chap;
+        gint count;
+
+        chapters = ghb_settings_get_value(js, "chapter_list");
+        count = ghb_array_len(chapters);
+        for(chap = 0; chap < count; chap++)
+        {
+            json_t *chapter_dict;
+            gchar *name;
+
+            name = NULL;
+            chapter = ghb_array_get_nth(chapters, chap);
+            name = ghb_value_string(chapter);
+            if (name == NULL)
+            {
+                name = g_strdup_printf (_("Chapter %2d"), chap+1);
             }
+            chapter_dict = json_pack_ex(&error, 0, "{s:o}",
+                                    "Name", json_string(name));
+            if (chapter_dict == NULL)
+            {
+                printf("json pack chapter failure: %s\n", error.text);
+                return;
+            }
+            json_array_append_new(chapter_list, chapter_dict);
+            g_free(name);
         }
     }
 
-    gboolean decomb_deint = ghb_settings_get_boolean(js, "PictureDecombDeinterlace");
-    gint decomb = ghb_settings_combo_int(js, "PictureDecomb");
-    gint deint = ghb_settings_combo_int(js, "PictureDeinterlace");
-    job->grayscale   = ghb_settings_get_boolean(js, "VideoGrayScale");
-
-    job->par.num = ghb_settings_get_int(js, "PicturePARWidth");
-    job->par.den = ghb_settings_get_int(js, "PicturePARHeight");
+    // Create filter list
+    json_t *filters_dict = json_object_get(dict, "Filter");
+    json_t *filter_list = json_object_get(filters_dict, "FilterList");
+    json_t *filter_dict;
+    char *filter_str;
 
+    // Crop scale filter
     int width, height, crop[4];
     width = ghb_settings_get_int(js, "scale_width");
     height = ghb_settings_get_int(js, "scale_height");
@@ -4329,54 +4440,108 @@ add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
 
     filter_str = g_strdup_printf("%d:%d:%d:%d:%d:%d",
                             width, height, crop[0], crop[1], crop[2], crop[3]);
-    filter = hb_filter_init(HB_FILTER_CROP_SCALE);
-    hb_add_filter( job, filter, filter_str );
+    filter_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                            "ID",       json_integer(HB_FILTER_CROP_SCALE),
+                            "Settings", json_string(filter_str));
+    if (filter_dict == NULL)
+    {
+        printf("json pack scale filter failure: %s\n", error.text);
+        return;
+    }
+    json_array_append_new(filter_list, filter_dict);
     g_free(filter_str);
 
-    /* Add selected filters */
+    // detelecine filter
     gint detel = ghb_settings_combo_int(js, "PictureDetelecine");
-    if ( detel )
+    if (detel)
     {
-        filter_str = NULL;
+        const char *filter_str = NULL;
         if (detel != 1)
         {
             if (detel_opts.map[detel].svalue != NULL)
-                filter_str = g_strdup(detel_opts.map[detel].svalue);
+                filter_str = detel_opts.map[detel].svalue;
         }
         else
-            filter_str = ghb_settings_get_string(js, "PictureDetelecineCustom");
-        filter = hb_filter_init(HB_FILTER_DETELECINE);
-        hb_add_filter( job, filter, filter_str );
-        g_free(filter_str);
+        {
+            filter_str = ghb_settings_get_const_string(js, "PictureDetelecineCustom");
+        }
+        filter_dict = json_pack_ex(&error, 0, "{s:o}",
+                                "ID", json_integer(HB_FILTER_DETELECINE));
+        if (filter_dict == NULL)
+        {
+            printf("json pack detelecine filter failure: %s\n", error.text);
+            return;
+        }
+        if (filter_str != NULL)
+        {
+            json_object_set_new(filter_dict, "Settings",
+                                json_string(filter_str));
+        }
+        json_array_append_new(filter_list, filter_dict);
     }
-    if ( decomb_deint && decomb )
+
+    // Decomb filter
+    gboolean decomb_deint;
+    gint decomb, deint;
+    decomb_deint = ghb_settings_get_boolean(js, "PictureDecombDeinterlace");
+    decomb = ghb_settings_combo_int(js, "PictureDecomb");
+    deint = ghb_settings_combo_int(js, "PictureDeinterlace");
+    if (decomb_deint && decomb)
     {
-        filter_str = NULL;
+        const char *filter_str = NULL;
         if (decomb != 1)
         {
             if (decomb_opts.map[decomb].svalue != NULL)
-                filter_str = g_strdup(decomb_opts.map[decomb].svalue);
+                filter_str = decomb_opts.map[decomb].svalue;
         }
         else
-            filter_str = ghb_settings_get_string(js, "PictureDecombCustom");
-        filter = hb_filter_init(HB_FILTER_DECOMB);
-        hb_add_filter( job, filter, filter_str );
-        g_free(filter_str);
+        {
+            filter_str = ghb_settings_get_const_string(js, "PictureDecombCustom");
+        }
+        filter_dict = json_pack_ex(&error, 0, "{s:o}",
+                                "ID", json_integer(HB_FILTER_DECOMB));
+        if (filter_dict == NULL)
+        {
+            printf("json pack decomb filter failure: %s\n", error.text);
+            return;
+        }
+        if (filter_str != NULL)
+        {
+            json_object_set_new(filter_dict, "Settings",
+                                json_string(filter_str));
+        }
+        json_array_append_new(filter_list, filter_dict);
     }
+
+    // Deinterlace filter
     if ( !decomb_deint && deint )
     {
-        filter_str = NULL;
+        const char *filter_str = NULL;
         if (deint != 1)
         {
             if (deint_opts.map[deint].svalue != NULL)
-                filter_str = g_strdup(deint_opts.map[deint].svalue);
+                filter_str = deint_opts.map[deint].svalue;
         }
         else
-            filter_str = ghb_settings_get_string(js, "PictureDeinterlaceCustom");
-        filter = hb_filter_init(HB_FILTER_DEINTERLACE);
-        hb_add_filter( job, filter, filter_str );
-        g_free(filter_str);
+        {
+            filter_str = ghb_settings_get_const_string(js, "PictureDeinterlaceCustom");
+        }
+        filter_dict = json_pack_ex(&error, 0, "{s:o}",
+                                "ID", json_integer(HB_FILTER_DEINTERLACE));
+        if (filter_dict == NULL)
+        {
+            printf("json pack deinterlace filter failure: %s\n", error.text);
+            return;
+        }
+        if (filter_str != NULL)
+        {
+            json_object_set_new(filter_dict, "Settings",
+                                json_string(filter_str));
+        }
+        json_array_append_new(filter_list, filter_dict);
     }
+
+    // Denoise filter
     if (strcmp(ghb_settings_get_const_string(js, "PictureDenoiseFilter"), "off"))
     {
         int filter_id = HB_FILTER_HQDN3D;
@@ -4387,8 +4552,15 @@ add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
         {
             const char *filter_str;
             filter_str = ghb_settings_get_const_string(js, "PictureDenoiseCustom");
-            filter = hb_filter_init(filter_id);
-            hb_add_filter( job, filter, filter_str );
+            filter_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                                "ID",       json_integer(filter_id),
+                                "Settings", json_string(filter_str));
+            if (filter_dict == NULL)
+            {
+                printf("json pack denoise filter failure: %s\n", error.text);
+                return;
+            }
+            json_array_append_new(filter_list, filter_dict);
         }
         else
         {
@@ -4396,45 +4568,37 @@ add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
             preset = ghb_settings_get_const_string(js, "PictureDenoisePreset");
             tune = ghb_settings_get_const_string(js, "PictureDenoiseTune");
             filter_str = hb_generate_filter_settings(filter_id, preset, tune);
-            filter = hb_filter_init(filter_id);
-            hb_add_filter( job, filter, filter_str );
+            filter_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                                "ID",       json_integer(filter_id),
+                                "Settings", json_string(filter_str));
+            if (filter_dict == NULL)
+            {
+                printf("json pack denoise filter failure: %s\n", error.text);
+                return;
+            }
+            json_array_append_new(filter_list, filter_dict);
             g_free(filter_str);
         }
     }
+
+    // Deblock filter
     gint deblock = ghb_settings_get_int(js, "PictureDeblock");
     if( deblock >= 5 )
     {
-        filter_str = NULL;
         filter_str = g_strdup_printf("%d", deblock);
-        filter = hb_filter_init(HB_FILTER_DEBLOCK);
-        hb_add_filter( job, filter, filter_str );
+        filter_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                            "ID",       json_integer(HB_FILTER_DEBLOCK),
+                            "Settings", json_string(filter_str));
+        if (filter_dict == NULL)
+        {
+            printf("json pack deblock filter failure: %s\n", error.text);
+            return;
+        }
+        json_array_append_new(filter_list, filter_dict);
         g_free(filter_str);
     }
 
-    job->vcodec = ghb_settings_video_encoder_codec(js, "VideoEncoder");
-    if ((job->mux & HB_MUX_MASK_MP4 ) && (job->vcodec == HB_VCODEC_THEORA))
-    {
-        // mp4/theora combination is not supported.
-        job->vcodec = HB_VCODEC_FFMPEG_MPEG4;
-    }
-    if ((job->vcodec == HB_VCODEC_X264) && (job->mux & HB_MUX_MASK_MP4))
-    {
-        job->ipod_atom = ghb_settings_get_boolean(js, "Mp4iPodCompatible");
-    }
-    if (ghb_settings_get_boolean(js, "vquality_type_constant"))
-    {
-        gdouble vquality;
-        vquality = ghb_settings_get_double(js, "VideoQualitySlider");
-        job->vquality =  vquality;
-        job->vbitrate = 0;
-    }
-    else if (ghb_settings_get_boolean(js, "vquality_type_bitrate"))
-    {
-        job->vquality = -1.0;
-        job->vbitrate = ghb_settings_get_int(js, "VideoAvgBitrate");
-    }
-
-    gint vrate_num;
+    // VFR filter
     gint vrate_den = ghb_settings_video_framerate_rate(js, "VideoFramerate");
     gint cfr;
     if (ghb_settings_get_boolean(js, "VideoFrameratePFR"))
@@ -4451,122 +4615,110 @@ add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
         ghb_log("zerolatency x264 tune selected, forcing constant framerate");
     }
 
-    if( vrate_den == 0 )
+    if (vrate_den == 0)
     {
-        vrate_num = title->vrate.num;
-        vrate_den = title->vrate.den;
+        filter_str = g_strdup_printf("%d", cfr);
     }
     else
     {
-        vrate_num = 27000000;
+        filter_str = g_strdup_printf("%d:%d:%d", cfr, 27000000, vrate_den);
     }
-    filter_str = g_strdup_printf("%d:%d:%d", cfr, vrate_num, vrate_den);
-    filter = hb_filter_init(HB_FILTER_VFR);
-    hb_add_filter( job, filter, filter_str );
+    filter_dict = json_pack_ex(&error, 0, "{s:o, s:o}",
+                        "ID",       json_integer(HB_FILTER_VFR),
+                        "Settings", json_string(filter_str));
+    if (filter_dict == NULL)
+    {
+        printf("json pack vfr filter failure: %s\n", error.text);
+        return;
+    }
+    json_array_append_new(filter_list, filter_dict);
     g_free(filter_str);
 
+    // Create audio list
+    json_t *audios_dict = json_object_get(dict, "Audio");
+    json_t *json_audio_list = json_object_get(audios_dict, "AudioList");
     const GhbValue *audio_list;
-    gint count, ii;
-    gint tcount = 0;
 
     audio_list = ghb_settings_get_value(js, "audio_list");
     count = ghb_array_len(audio_list);
     for (ii = 0; ii < count; ii++)
     {
+        json_t *audio_dict;
         GhbValue *asettings;
-        hb_audio_config_t audio;
-        hb_audio_config_t *aconfig;
-        gint acodec, fallback;
+        int track, acodec, mixdown, samplerate;
+        const char *aname;
+        double gain, drc, quality;
 
-        hb_audio_config_init(&audio);
         asettings = ghb_array_get_nth(audio_list, ii);
-        audio.in.track = ghb_settings_get_int(asettings, "AudioTrack");
-        audio.out.track = tcount;
-
-        char * aname = ghb_settings_get_string(asettings, "AudioTrackName");
-        if (aname && *aname)
+        track = ghb_settings_get_int(asettings, "AudioTrack");
+        aname = ghb_settings_get_const_string(asettings, "AudioTrackName");
+        acodec = ghb_settings_audio_encoder_codec(asettings, "AudioEncoder");
+        audio_dict = json_pack_ex(&error, 0,
+            "{s:o, s:o}",
+            "Track",                json_integer(track),
+            "Encoder",              json_integer(acodec));
+        if (audio_dict == NULL)
         {
-            // This leaks, but there is no easy way to clean up
-            // presently
-            audio.out.name = aname;
+            printf("json pack audio failure: %s\n", error.text);
+            return;
         }
-        else
+        if (aname != NULL && aname[0] != 0)
         {
-            g_free(aname);
+            json_object_set_new(audio_dict, "Name", json_string(aname));
         }
 
-        aconfig = (hb_audio_config_t *) hb_list_audio_config_item(
-                                    title->list_audio, audio.in.track );
-
-        acodec = ghb_settings_audio_encoder_codec(asettings, "AudioEncoder");
-
-        fallback = ghb_settings_audio_encoder_codec(js, "AudioEncoderFallback");
-        gint copy_mask = ghb_get_copy_mask(js);
-        audio.out.codec = ghb_select_audio_codec(job->mux, aconfig, acodec, fallback, copy_mask);
-
-        audio.out.gain =
-            ghb_settings_get_double(asettings, "AudioTrackGainSlider");
-
-        audio.out.dynamic_range_compression =
-            ghb_settings_get_double(asettings, "AudioTrackDRCSlider");
-        if (audio.out.dynamic_range_compression < 1.0)
-            audio.out.dynamic_range_compression = 0.0;
-
         // It would be better if this were done in libhb for us, but its not yet.
-        if (ghb_audio_is_passthru(audio.out.codec))
+        if (!ghb_audio_is_passthru(acodec))
         {
-            audio.out.mixdown = 0;
-        }
-        else
-        {
-            audio.out.mixdown = ghb_settings_mixdown_mix(asettings, "AudioMixdown");
-            // Make sure the mixdown is valid and pick a new one if not.
-            audio.out.mixdown = ghb_get_best_mix(aconfig, audio.out.codec,
-                                                    audio.out.mixdown);
-            gint srate;
-            srate = ghb_settings_audio_samplerate_rate(
+            gain = ghb_settings_get_double(asettings, "AudioTrackGainSlider");
+            if (gain > 0)
+                json_object_set_new(audio_dict, "Gain", json_real(gain));
+            drc = ghb_settings_get_double(asettings, "AudioTrackDRCSlider");
+            if (drc < 1.0)
+                drc = 0.0;
+            if (drc > 0)
+                json_object_set_new(audio_dict, "DRC", json_real(drc));
+
+            mixdown = ghb_settings_mixdown_mix(asettings, "AudioMixdown");
+            json_object_set_new(audio_dict, "Mixdown", json_integer(mixdown));
+
+            samplerate = ghb_settings_audio_samplerate_rate(
                                             asettings, "AudioSamplerate");
-            if (srate == 0) // 0 is same as source
-                audio.out.samplerate = aconfig->in.samplerate;
-            else
-                audio.out.samplerate = srate;
-            double quality = ghb_settings_get_double(asettings, "AudioTrackQuality");
-            if (ghb_settings_get_boolean(asettings, "AudioTrackQualityEnable") &&
-                quality != HB_INVALID_AUDIO_QUALITY)
+            json_object_set_new(audio_dict, "Samplerate",
+                                json_integer(samplerate));
+            gboolean qe;
+            qe = ghb_settings_get_boolean(asettings, "AudioTrackQualityEnable");
+            quality = ghb_settings_get_double(asettings, "AudioTrackQuality");
+            if (qe && quality != HB_INVALID_AUDIO_QUALITY)
             {
-                audio.out.quality = quality;
-                audio.out.bitrate = -1;
+                json_object_set_new(audio_dict, "Quality", json_real(quality));
             }
             else
             {
-                audio.out.quality = HB_INVALID_AUDIO_QUALITY;
-                audio.out.bitrate =
+                int bitrate =
                     ghb_settings_audio_bitrate_rate(asettings, "AudioBitrate");
-
-                audio.out.bitrate = hb_audio_bitrate_get_best(
-                    audio.out.codec, audio.out.bitrate,
-                    audio.out.samplerate, audio.out.mixdown);
+                bitrate = hb_audio_bitrate_get_best(
+                                        acodec, bitrate, samplerate, mixdown);
+                json_object_set_new(audio_dict, "Bitrate",
+                                    json_integer(bitrate));
             }
         }
 
-        // Add it to the jobs audio list
-        hb_audio_add( job, &audio );
-        tcount++;
+        json_array_append_new(json_audio_list, audio_dict);
     }
 
-    dest_str = ghb_settings_get_string(js, "destination");
-    hb_job_set_file( job, dest_str);
-    g_free(dest_str);
-
+    // Create subtitle list
+    json_t *subtitles_dict = json_object_get(dict, "Subtitle");
+    json_t *json_subtitle_list = json_object_get(subtitles_dict, "SubtitleList");
     const GhbValue *subtitle_list;
-    gint subtitle;
-    gboolean force, burned, def, one_burned = FALSE;
 
-    ghb_settings_set_boolean(js, "subtitle_scan", FALSE);
     subtitle_list = ghb_settings_get_value(js, "subtitle_list");
     count = ghb_array_len(subtitle_list);
     for (ii = 0; ii < count; ii++)
     {
+        json_t *subtitle_dict;
+        gint track;
+        gboolean force, burned, def, one_burned = FALSE;
         GhbValue *ssettings;
         gint source;
 
@@ -4579,174 +4731,104 @@ add_job(hb_handle_t *h, GhbValue *js, gint unique_id, int titleindex)
 
         if (source == SRTSUB)
         {
-            hb_subtitle_config_t sub_config;
-            gchar *filename, *lang, *code;
-
-            filename = ghb_settings_get_string(ssettings, "SrtFile");
+            const gchar *filename, *lang, *code;
+            int offset;
+            filename = ghb_settings_get_const_string(ssettings, "SrtFile");
             if (!g_file_test(filename, G_FILE_TEST_IS_REGULAR))
             {
                 continue;
             }
-            sub_config.offset = ghb_settings_get_int(ssettings, "SrtOffset");
-            lang = ghb_settings_get_string(ssettings, "SrtLanguage");
-            code = ghb_settings_get_string(ssettings, "SrtCodeset");
-            strncpy(sub_config.src_filename, filename, 255);
-            sub_config.src_filename[255] = 0;
-            strncpy(sub_config.src_codeset, code, 39);
-            sub_config.src_codeset[39] = 0;
-            sub_config.force = 0;
-            sub_config.default_track = def;
+            offset = ghb_settings_get_int(ssettings, "SrtOffset");
+            lang = ghb_settings_get_const_string(ssettings, "SrtLanguage");
+            code = ghb_settings_get_const_string(ssettings, "SrtCodeset");
             if (burned && !one_burned && hb_subtitle_can_burn(SRTSUB))
             {
                 // Only allow one subtitle to be burned into the video
-                sub_config.dest = RENDERSUB;
                 one_burned = TRUE;
             }
             else
             {
-                sub_config.dest = PASSTHRUSUB;
+                burned = FALSE;
             }
-
-            hb_srt_add( job, &sub_config, lang);
-
-            g_free(filename);
-            g_free(lang);
-            g_free(code);
-            continue;
+            subtitle_dict = json_pack_ex(&error, 0,
+                "{s:o, s:o, s:o, s:{s:o, s:o, s:o}}",
+                "Default",  json_boolean(def),
+                "Burn",     json_boolean(burned),
+                "Offset",   json_integer(offset),
+                "SRT",
+                    "Filename", json_string(filename),
+                    "Language", json_string(lang),
+                    "Codeset",  json_string(code));
+            if (subtitle_dict == NULL)
+            {
+                printf("json pack srt failure: %s\n", error.text);
+                return;
+            }
+            json_array_append_new(json_subtitle_list, subtitle_dict);
         }
 
-        subtitle = ghb_settings_get_int(ssettings, "SubtitleTrack");
-        if (subtitle == -1)
+        track = ghb_settings_get_int(ssettings, "SubtitleTrack");
+        if (track == -1)
         {
+            json_t *search = json_object_get(subtitles_dict, "Search");
             if (burned && !one_burned)
             {
                 // Only allow one subtitle to be burned into the video
-                job->select_subtitle_config.dest = RENDERSUB;
                 one_burned = TRUE;
             }
             else
             {
-                job->select_subtitle_config.dest = PASSTHRUSUB;
+                burned = FALSE;
             }
-            job->select_subtitle_config.force = force;
-            job->select_subtitle_config.default_track = def;
-            job->indepth_scan = 1;
-            ghb_settings_set_boolean(js, "subtitle_scan", TRUE);
+            json_object_set_new(search, "Enable", json_boolean(TRUE));
+            json_object_set_new(search, "Forced", json_boolean(force));
+            json_object_set_new(search, "Default", json_boolean(def));
+            json_object_set_new(search, "Burn", json_boolean(burned));
         }
-        else if (subtitle >= 0)
+        else if (track >= 0)
         {
-            hb_subtitle_t * subt;
-            hb_subtitle_config_t sub_config;
+            if (burned && !one_burned && hb_subtitle_can_burn(source))
+            {
+                // Only allow one subtitle to be burned into the video
+                one_burned = TRUE;
+            }
+            else
+            {
+                burned = FALSE;
+            }
 
-            subt = hb_list_item(title->list_subtitle, subtitle);
-            if (subt != NULL)
+            subtitle_dict = json_pack_ex(&error, 0,
+            "{s:o, s:o, s:o, s:o}",
+                "Track",    json_integer(track),
+                "Default",  json_boolean(def),
+                "Force",    json_boolean(force),
+                "Burn",     json_boolean(burned));
+            if (subtitle_dict == NULL)
             {
-                sub_config = subt->config;
-                if (burned && !one_burned && hb_subtitle_can_burn(subt->source))
-                {
-                    // Only allow one subtitle to be burned into the video
-                    sub_config.dest = RENDERSUB;
-                    one_burned = TRUE;
-                }
-                else
-                {
-                    sub_config.dest = PASSTHRUSUB;
-                }
-                sub_config.force = force;
-                sub_config.default_track = def;
-                hb_subtitle_add( job, &sub_config, subtitle );
+                printf("json pack subtitle failure: %s\n", error.text);
+                return;
             }
+            json_array_append_new(json_subtitle_list, subtitle_dict);
         }
     }
-    if (one_burned)
-    {
-        // Add filter that renders vobsubs
-        filter = hb_filter_init(HB_FILTER_RENDER_SUB);
-        filter_str = g_strdup_printf("%d:%d:%d:%d",
-                                crop[0], crop[1], crop[2], crop[3]);
-        hb_add_filter( job, filter, filter_str );
-        g_free(filter_str);
-    }
-
-
-    char * meta;
-
-    meta = ghb_settings_get_string(js, "MetaName");
-    if (meta && *meta)
-    {
-        hb_metadata_set_name(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaArtist");
-    if (meta && *meta)
-    {
-        hb_metadata_set_artist(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaAlbumArtist");
-    if (meta && *meta)
-    {
-        hb_metadata_set_album_artist(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaReleaseDate");
-    if (meta && *meta)
-    {
-        hb_metadata_set_release_date(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaComment");
-    if (meta && *meta)
-    {
-        hb_metadata_set_comment(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaGenre");
-    if (meta && *meta)
-    {
-        hb_metadata_set_genre(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaDescription");
-    if (meta && *meta)
-    {
-        hb_metadata_set_description(job->metadata, meta);
-    }
-    free(meta);
-    meta = ghb_settings_get_string(js, "MetaLongDescription");
-    if (meta && *meta)
-    {
-        hb_metadata_set_long_description(job->metadata, meta);
-    }
-    free(meta);
 
-    job->twopass = ghb_settings_get_boolean(js, "VideoTwoPass");
-    job->fastfirstpass = ghb_settings_get_boolean(js, "VideoTurboTwoPass");
-    job->sequence_id = unique_id;
-    ghb_set_video_encoder_opts(job, js);
-    hb_add(h, job);
+    char *json_job = json_dumps(dict, JSON_INDENT(4));
+    json_decref(dict);
 
-    hb_job_close(&job);
+    hb_add_json(h, json_job);
+    free(json_job);
 }
 
 void
 ghb_add_job(GhbValue *js, gint unique_id)
 {
-    // Since I'm doing a scan of the single title I want just prior
-    // to adding the job, there is only the one title to choose from.
-    add_job(h_queue, js, unique_id, 0);
+    add_job(h_queue, js, unique_id);
 }
 
 void
 ghb_add_live_job(GhbValue *js, gint unique_id)
 {
-    int title_id, titleindex;
-    const hb_title_t *title;
-
-    title_id = ghb_settings_get_int(js, "title");
-    title = ghb_lookup_title(title_id, &titleindex);
-    (void)title; // Silence "unused variable" warning
-    add_job(h_scan, js, unique_id, titleindex);
+    add_job(h_live, js, unique_id);
 }
 
 void
@@ -4781,13 +4863,13 @@ ghb_stop_queue()
 void
 ghb_start_live_encode()
 {
-    hb_start( h_scan );
+    hb_start( h_live );
 }
 
 void
 ghb_stop_live_encode()
 {
-    hb_stop( h_scan );
+    hb_stop( h_live );
 }
 
 void
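
Taken together, the hunks above replace the hand-built hb_job_t with a jansson dictionary that is serialized and handed to libhb. A minimal sketch of that pattern, with the path, title number and helper name as placeholders and most error handling trimmed:

#include <stdio.h>
#include <stdlib.h>
#include <jansson.h>
#include "hb.h"

// Hypothetical helper: queue a bare JSON job for one source title.
static void queue_json_job(hb_handle_t *h, const char *path, int title)
{
    json_error_t error;
    json_t *dict = json_pack_ex(&error, 0, "{s:{s:s, s:i}}",
        "Source",
            "Path",  path,
            "Title", title);
    if (dict == NULL)
    {
        printf("json pack failure: %s\n", error.text);
        return;
    }
    // Audio, Subtitle, Destination, etc. would be packed into the same
    // dict, as in the hunks above, before dumping.
    char *json_job = json_dumps(dict, JSON_INDENT(4));
    json_decref(dict);
    hb_add_json(h, json_job);   // libhb copies the string and scans the title itself
    free(json_job);
}

Because hb_add_json() only stores the JSON string, the frontend no longer needs a completed scan before queueing the job.
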
index d2040a3cbcdf3f35ce194fcb6b8add9873d9d6d2..53661e38ac195d364392dc59aa73f02b1f4116cf 100644 (file)
@@ -41,8 +41,9 @@ typedef struct
 
     // WORKING
     gint unique_id;
-    gint job_cur;
-    gint job_count;
+    gint pass_id;
+    gint pass;
+    gint pass_count;
     gdouble progress;
     gdouble rate_cur;
     gdouble rate_avg;
@@ -56,6 +57,7 @@ typedef struct
 {
     ghb_instance_status_t scan;
     ghb_instance_status_t queue;
+    ghb_instance_status_t live;
 } ghb_status_t;
 
 #define MOD_ROUND(v,m) ((m==1)?v:(m * ((v + (m>>1)) / m)))
@@ -100,6 +102,7 @@ void ghb_stop_live_encode();
 
 void ghb_clear_scan_state(gint state);
 void ghb_clear_queue_state(gint state);
+void ghb_clear_live_state(gint state);
 
 void ghb_set_state(gint state);
 gint ghb_get_scan_state();
index 3aca6b00ecad1143863495106b3730487d41b17c..3a08152bbaf47ec42499775f7144973454e22d9b 100644 (file)
@@ -127,7 +127,7 @@ hb_title_t * hb_batch_title_scan( hb_batch_t * d, int t )
         return NULL;
 
     hb_log( "batch: scanning %s", filename );
-    title = hb_title_init( filename, 0 );
+    title = hb_title_init( filename, t );
     stream = hb_stream_open( filename, title, 1 );
     if ( stream == NULL )
     {
@@ -137,10 +137,6 @@ hb_title_t * hb_batch_title_scan( hb_batch_t * d, int t )
 
     title = hb_stream_title_scan( stream, title );
     hb_stream_close( &stream );
-    if ( title != NULL )
-    {
-        title->index = t;
-    }
 
     return title;
 }
index 78b4437c6c2fa592bfaf9975a53896f431dbcde0..339c3919fe4e11076bb0100638b79c68c8a5ab5b 100644 (file)
@@ -2582,7 +2582,7 @@ void hb_list_rem( hb_list_t * l, void * p )
  *********************************************************************/
 void * hb_list_item( const hb_list_t * l, int i )
 {
-    if( i < 0 || i >= l->items_count )
+    if( l == NULL || i < 0 || i >= l->items_count )
     {
         return NULL;
     }
@@ -2712,6 +2712,9 @@ void hb_list_close( hb_list_t ** _l )
 {
     hb_list_t * l = *_l;
 
+    if (l == NULL)
+        return;
+
     free( l->items );
     free( l );
 
@@ -3048,7 +3051,7 @@ static void job_setup(hb_job_t * job, hb_title_t * title)
     job->vquality   = -1.0;
     job->vbitrate   = 1000;
     job->twopass    = 0;
-    job->pass       = 0;
+    job->pass_id    = HB_PASS_ENCODE;
     job->vrate      = title->vrate;
 
     job->mux = HB_MUX_MP4;
@@ -3078,6 +3081,8 @@ static void job_clean( hb_job_t * job )
         hb_filter_object_t *filter;
         hb_attachment_t *attachment;
 
+        free((void*)job->json);
+        job->json = NULL;
         free(job->encoder_preset);
         job->encoder_preset = NULL;
         free(job->encoder_tune);
index 41c5dd82263440bdc30fadfa589a3c2b704f00a0..a8a699387eb63c958b56caaecd807e9886b010b7 100644 (file)
@@ -446,6 +446,8 @@ typedef enum
  *****************************************************************************/
 struct hb_job_s
 {
+    PRIVATE const char  * json;   // JSON encoded job string
+
     /* ID assigned by UI so it can group job passes together */
     int             sequence_id;
 
@@ -494,7 +496,7 @@ struct hb_job_s
     int             vbitrate;
     hb_rational_t   vrate;
     int             cfr;
-    PRIVATE int     pass;
+    PRIVATE int     pass_id;
     int             twopass;        // Enable 2-pass encode. Boolean
     int             fastfirstpass;
     char           *encoder_preset;
@@ -605,7 +607,6 @@ struct hb_job_s
 #ifdef __LIBHB__
     /* Internal data */
     hb_handle_t   * h;
-    hb_lock_t     * pause;
     volatile hb_error_code * done_error;
     volatile int  * die;
     volatile int    done;
@@ -993,9 +994,14 @@ struct hb_state_s
         struct
         {
             /* HB_STATE_WORKING */
+#define HB_PASS_SUBTITLE    -1
+#define HB_PASS_ENCODE      0
+#define HB_PASS_ENCODE_1ST  1   // Some code depends on these values being
+#define HB_PASS_ENCODE_2ND  2   // 1 and 2.  Do not change.
+            int   pass_id;
+            int   pass;
+            int   pass_count;
             float progress;
-            int   job_cur;
-            int   job_count;
             float rate_cur;
             float rate_avg;
             int   hours;
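
The working state now identifies the pass explicitly instead of exposing a job counter, so a frontend can label progress without guessing. A rough sketch of the mapping a UI might apply (the helper name is illustrative, not part of libhb):

// Illustrative mapping from the new pass identifiers to status text.
static const char * pass_description(int pass_id)
{
    switch (pass_id)
    {
        case HB_PASS_SUBTITLE:   return "Subtitle scan";
        case HB_PASS_ENCODE_1ST: return "Encoding, first pass";
        case HB_PASS_ENCODE_2ND: return "Encoding, second pass";
        case HB_PASS_ENCODE:
        default:                 return "Encoding";
    }
}
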
index b396f9c708a55b23edcceebfa3c78d9960a446a2..fe479a3935958fc8d2034f6de53366a2883e205b 100644 (file)
@@ -99,7 +99,7 @@ int encavcodecInit( hb_work_object_t * w, hb_job_t * job )
 
     // Set things in context that we will allow the user to 
     // override with advanced settings.
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         hb_interjob_t * interjob = hb_interjob_get( job->h );
         fps.den = interjob->vrate.den;
@@ -237,12 +237,13 @@ int encavcodecInit( hb_work_object_t * w, hb_job_t * job )
         context->flags |= CODEC_FLAG_GRAY;
     }
 
-    if( job->pass != 0 && job->pass != -1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST ||
+        job->pass_id == HB_PASS_ENCODE_2ND )
     {
         char filename[1024]; memset( filename, 0, 1024 );
         hb_get_tempory_filename( job->h, filename, "ffmpeg.log" );
 
-        if( job->pass == 1 )
+        if( job->pass_id == HB_PASS_ENCODE_1ST )
         {
             pv->file = hb_fopen(filename, "wb");
             context->flags |= CODEC_FLAG_PASS1;
@@ -287,7 +288,7 @@ int encavcodecInit( hb_work_object_t * w, hb_job_t * job )
     {
         job->areBframes = 1;
     }
-    if( ( job->mux & HB_MUX_MASK_MP4 ) && job->pass != 1 )
+    if( ( job->mux & HB_MUX_MASK_MP4 ) && job->pass_id != HB_PASS_ENCODE_1ST )
     {
         w->config->mpeg4.length = context->extradata_size;
         memcpy( w->config->mpeg4.bytes, context->extradata,
@@ -589,7 +590,8 @@ int encavcodecWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
                 buf_last = buf;
             }
             /* Write stats */
-            if (job->pass == 1 && pv->context->stats_out != NULL)
+            if (job->pass_id == HB_PASS_ENCODE_1ST &&
+                pv->context->stats_out != NULL)
             {
                 fprintf( pv->file, "%s", pv->context->stats_out );
             }
index 8c9c4cffa7be9ce1439be26f295bbcd867aa0726..3b9dacebb2eb28908e8eea39c2fe340f1cbcc230 100644 (file)
@@ -44,12 +44,13 @@ int enctheoraInit( hb_work_object_t * w, hb_job_t * job )
 
     pv->job = job;
 
-    if( job->pass != 0 && job->pass != -1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST ||
+        job->pass_id == HB_PASS_ENCODE_2ND )
     {
         char filename[1024];
         memset( filename, 0, 1024 );
         hb_get_tempory_filename( job->h, filename, "theroa.log" );
-        if ( job->pass == 1 )
+        if ( job->pass_id == HB_PASS_ENCODE_1ST )
         {
             pv->file = hb_fopen(filename, "wb");
         }
@@ -71,7 +72,7 @@ int enctheoraInit( hb_work_object_t * w, hb_job_t * job )
     ti.frame_height = (job->height + 0xf) & ~0xf;
     ti.pic_x = ti.pic_y = 0;
 
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         hb_interjob_t * interjob = hb_interjob_get( job->h );
         ti.fps_numerator = interjob->vrate.num;
@@ -125,7 +126,8 @@ int enctheoraInit( hb_work_object_t * w, hb_job_t * job )
     {
         hb_log("theora: Could not set soft ratecontrol");
     }
-    if( job->pass != 0 && job->pass != -1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST ||
+        job->pass_id == HB_PASS_ENCODE_2ND )
     {
         arg = keyframe_frequency * 7 >> 1;
         ret = th_encode_ctl(pv->ctx, TH_ENCCTL_SET_RATE_BUFFER, &arg, sizeof(arg));
@@ -135,7 +137,7 @@ int enctheoraInit( hb_work_object_t * w, hb_job_t * job )
         }
     }
 
-    if( job->pass == 1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST )
     {
         unsigned char *buffer;
         int bytes;
@@ -153,7 +155,7 @@ int enctheoraInit( hb_work_object_t * w, hb_job_t * job )
         }
         fflush( pv->file );
     }
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         /* Enable the second pass here.
          * We make this call just to set the encoder into 2-pass mode, because
@@ -233,7 +235,7 @@ int enctheoraWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
         *buf_out = in;
         *buf_in = NULL;
         th_encode_packetout( pv->ctx, 1, &op );
-        if( job->pass == 1 )
+        if( job->pass_id == HB_PASS_ENCODE_1ST )
         {
             unsigned char *buffer;
             int bytes;
@@ -256,7 +258,7 @@ int enctheoraWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
         return HB_WORK_DONE;
     }
 
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         for(;;)
         {
@@ -335,7 +337,7 @@ int enctheoraWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
 
     th_encode_ycbcr_in( pv->ctx, ycbcr );
 
-    if( job->pass == 1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST )
     {
         unsigned char *buffer;
         int bytes;
index 8b39ab0ba846b0f50da4329e68fdded16ef05c38..780385d7b8e6042eadf33d826be81548ee184b5f 100644 (file)
@@ -123,7 +123,7 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
 
     /* Some HandBrake-specific defaults; users can override them
      * using the encoder_options string. */
-    if( job->pass == 2 && job->cfr != 1 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND && job->cfr != 1 )
     {
         hb_interjob_t * interjob = hb_interjob_get( job->h );
         param.i_fps_num = interjob->vrate.num;
@@ -268,19 +268,20 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
         param.rc.i_rc_method = X264_RC_ABR;
         param.rc.i_bitrate = job->vbitrate;
         hb_log( "encx264: encoding at average bitrate %d", param.rc.i_bitrate );
-        if( job->pass > 0 && job->pass < 3 )
+        if( job->pass_id == HB_PASS_ENCODE_1ST ||
+            job->pass_id == HB_PASS_ENCODE_2ND )
         {
             memset( pv->filename, 0, 1024 );
             hb_get_tempory_filename( job->h, pv->filename, "x264.log" );
         }
-        switch( job->pass )
+        switch( job->pass_id )
         {
-            case 1:
+            case HB_PASS_ENCODE_1ST:
                 param.rc.b_stat_read  = 0;
                 param.rc.b_stat_write = 1;
                 param.rc.psz_stat_out = pv->filename;
                 break;
-            case 2:
+            case HB_PASS_ENCODE_2ND:
                 param.rc.b_stat_read  = 1;
                 param.rc.b_stat_write = 0;
                 param.rc.psz_stat_in  = pv->filename;
@@ -310,7 +311,7 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
     }
 
     /* Turbo first pass */
-    if( job->pass == 1 && job->fastfirstpass == 1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST && job->fastfirstpass == 1 )
     {
         x264_param_apply_fastfirstpass( &param );
     }
@@ -341,7 +342,7 @@ int encx264Init( hb_work_object_t * w, hb_job_t * job )
     }
     free( x264_opts_unparsed );
 
-    hb_deep_log( 2, "encx264: opening libx264 (pass %d)", job->pass );
+    hb_deep_log( 2, "encx264: opening libx264 (pass %d)", job->pass_id );
     pv->x264 = x264_encoder_open( &param );
     if ( pv->x264 == NULL )
     {
index d9b4be64e6e3eec1c7c8c78689a05e4b39b06536..324c07e5ba48a95d45067f92e5350c000aee3d31 100644 (file)
@@ -234,18 +234,19 @@ int encx265Init(hb_work_object_t *w, hb_job_t *job)
     {
         param->rc.rateControlMode = X265_RC_ABR;
         param->rc.bitrate         = job->vbitrate;
-        if (job->pass > 0 && job->pass < 3)
+        if (job->pass_id == HB_PASS_ENCODE_1ST ||
+            job->pass_id == HB_PASS_ENCODE_2ND)
         {
             char stats_file[1024] = "";
             char pass[2];
-            snprintf(pass, sizeof(pass), "%d", job->pass);
+            snprintf(pass, sizeof(pass), "%d", job->pass_id);
             hb_get_tempory_filename(job->h, stats_file, "x265.log");
             if (param_parse(param, "stats", stats_file) ||
                 param_parse(param, "pass", pass))
             {
                 goto fail;
             }
-            if (job->pass == 1 && job->fastfirstpass == 0 &&
+            if (job->pass_id == HB_PASS_ENCODE_1ST && job->fastfirstpass == 0 &&
                 param_parse(param, "slow-firstpass", "1"))
             {
                 goto fail;
index 9514de3eed064642d9ccf1ade5439a65a5412957..84347d768efc4064bb6a5d8dd96e36eacd0e30fd 100644 (file)
@@ -48,8 +48,6 @@ struct hb_handle_s
        from this one (see work.c) */
     hb_list_t    * jobs;
     hb_job_t     * current_job;
-    int            job_count;
-    int            job_count_permanent;
     volatile int   work_die;
     hb_error_code  work_error;
     hb_thread_t  * work_thread;
@@ -1255,7 +1253,7 @@ hb_job_t * hb_current_job( hb_handle_t * h )
  * @param h Handle to hb_handle_t.
  * @param job Handle to hb_job_t.
  */
-static void hb_add_internal( hb_handle_t * h, hb_job_t * job )
+static void hb_add_internal( hb_handle_t * h, hb_job_t * job, hb_list_t *list_pass )
 {
     hb_job_t      * job_copy;
     hb_audio_t    * audio;
@@ -1336,21 +1334,64 @@ static void hb_add_internal( hb_handle_t * h, hb_job_t * job )
         job_copy->file = strdup(job->file);
 
     job_copy->h     = h;
-    job_copy->pause = h->pause_lock;
 
     /* Copy the job filter list */
     job_copy->list_filter = hb_filter_list_copy( job->list_filter );
 
     /* Add the job to the list */
-    hb_list_add( h->jobs, job_copy );
-    h->job_count = hb_count(h);
-    h->job_count_permanent++;
+    hb_list_add( list_pass, job_copy );
+}
+
+hb_job_t* hb_job_copy(hb_job_t * job)
+{
+    hb_job_t      * job_copy;
+
+    /* Copy the job */
+    job_copy        = calloc( sizeof( hb_job_t ), 1 );
+    if (job_copy == NULL)
+        return NULL;
+
+    if (job->json != NULL)
+    {
+        // JSON jobs should only have the json string set.
+        job_copy->json = strdup(job->json);
+        return job_copy;
+    }
+    memcpy( job_copy, job, sizeof( hb_job_t ) );
+
+    job_copy->list_subtitle = hb_subtitle_list_copy( job->list_subtitle );
+    job_copy->list_chapter = hb_chapter_list_copy( job->list_chapter );
+    job_copy->list_audio = hb_audio_list_copy( job->list_audio );
+    job_copy->list_attachment = hb_attachment_list_copy( job->list_attachment );
+    job_copy->metadata = hb_metadata_copy( job->metadata );
+
+    if (job->encoder_preset != NULL)
+        job_copy->encoder_preset = strdup(job->encoder_preset);
+    if (job->encoder_tune != NULL)
+        job_copy->encoder_tune = strdup(job->encoder_tune);
+    if (job->encoder_options != NULL)
+        job_copy->encoder_options = strdup(job->encoder_options);
+    if (job->encoder_profile != NULL)
+        job_copy->encoder_profile = strdup(job->encoder_profile);
+    if (job->encoder_level != NULL)
+        job_copy->encoder_level = strdup(job->encoder_level);
+    if (job->file != NULL)
+        job_copy->file = strdup(job->file);
+
+    job_copy->list_filter = hb_filter_list_copy( job->list_filter );
+
+    return job_copy;
 }
 
 void hb_add( hb_handle_t * h, hb_job_t * job )
 {
-    int sub_id = 0;
+    hb_job_t *job_copy = hb_job_copy(job);
+    job_copy->h = h;
+    hb_list_add(h->jobs, job_copy);
+}
 
+void hb_job_setup_passes(hb_handle_t * h, hb_job_t * job, hb_list_t * list_pass)
+{
     if (job->vquality >= 0)
     {
         job->twopass = 0;
@@ -1358,26 +1399,22 @@ void hb_add( hb_handle_t * h, hb_job_t * job )
     if (job->indepth_scan)
     {
         hb_deep_log(2, "Adding subtitle scan pass");
-        job->pass = -1;
-        job->sequence_id = (job->sequence_id & 0xFFFFFF) | (sub_id++ << 24);
-        hb_add_internal(h, job);
+        job->pass_id = HB_PASS_SUBTITLE;
+        hb_add_internal(h, job, list_pass);
         job->indepth_scan = 0;
     }
     if (job->twopass)
     {
         hb_deep_log(2, "Adding two-pass encode");
-        job->pass = 1;
-        job->sequence_id = (job->sequence_id & 0xFFFFFF) | (sub_id++ << 24);
-        hb_add_internal(h, job);
-        job->pass = 2;
-        job->sequence_id = (job->sequence_id & 0xFFFFFF) | (sub_id++ << 24);
-        hb_add_internal(h, job);
+        job->pass_id = HB_PASS_ENCODE_1ST;
+        hb_add_internal(h, job, list_pass);
+        job->pass_id = HB_PASS_ENCODE_2ND;
+        hb_add_internal(h, job, list_pass);
     }
     else
     {
-        job->pass = 0;
-        job->sequence_id = (job->sequence_id & 0xFFFFFF) | (sub_id++ << 24);
-        hb_add_internal(h, job);
+        job->pass_id = HB_PASS_ENCODE;
+        hb_add_internal(h, job, list_pass);
     }
 }
 
@@ -1389,12 +1426,6 @@ void hb_add( hb_handle_t * h, hb_job_t * job )
 void hb_rem( hb_handle_t * h, hb_job_t * job )
 {
     hb_list_rem( h->jobs, job );
-
-    h->job_count = hb_count(h);
-    if (h->job_count_permanent)
-        h->job_count_permanent--;
-
-    /* XXX free everything XXX */
 }
 
 /**
@@ -1405,16 +1436,10 @@ void hb_rem( hb_handle_t * h, hb_job_t * job )
  */
 void hb_start( hb_handle_t * h )
 {
-    /* XXX Hack */
-    h->job_count = hb_list_count( h->jobs );
-    h->job_count_permanent = h->job_count;
-
     hb_lock( h->state_lock );
     h->state.state = HB_STATE_WORKING;
 #define p h->state.param.working
     p.progress  = 0.0;
-    p.job_cur   = 1;
-    p.job_count = h->job_count;
     p.rate_cur  = 0.0;
     p.rate_avg  = 0.0;
     p.hours     = -1;
@@ -1477,10 +1502,6 @@ void hb_resume( hb_handle_t * h )
 void hb_stop( hb_handle_t * h )
 {
     h->work_die = 1;
-
-    h->job_count = hb_count(h);
-    h->job_count_permanent = 0;
-
     hb_resume( h );
 }
 
@@ -1491,10 +1512,6 @@ void hb_stop( hb_handle_t * h )
 void hb_scan_stop( hb_handle_t * h )
 {
     h->scan_die = 1;
-
-    h->job_count = hb_count(h);
-    h->job_count_permanent = 0;
-
     hb_resume( h );
 }
 
@@ -1735,9 +1752,6 @@ static void thread_func( void * _h )
             h->state.state                = HB_STATE_WORKDONE;
             h->state.param.workdone.error = h->work_error;
 
-            h->job_count = hb_count(h);
-            if (h->job_count < 1)
-                h->job_count_permanent = 0;
             hb_unlock( h->state_lock );
         }
 
@@ -1813,14 +1827,6 @@ void hb_set_state( hb_handle_t * h, hb_state_t * s )
     if( h->state.state == HB_STATE_WORKING ||
         h->state.state == HB_STATE_SEARCHING )
     {
-        /* XXX Hack */
-        if (h->job_count < 1)
-            h->job_count_permanent = 1;
-
-        h->state.param.working.job_cur =
-            h->job_count_permanent - hb_list_count( h->jobs );
-        h->state.param.working.job_count = h->job_count_permanent;
-
         // Set which job is being worked on
         if (h->current_job)
             h->state.param.working.sequence_id = h->current_job->sequence_id & 0xFFFFFF;
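
hb_add() no longer expands passes or maintains per-handle job counts; it simply deep-copies the job via hb_job_copy() onto the handle's queue, and the sub-job passes are created by hb_job_setup_passes() only when the work loop is about to run the job. Assuming the usual hb_job_init()/hb_job_close() helpers, a caller can therefore release its own copy immediately, roughly:

hb_job_t *job = hb_job_init(title);
/* configure video, audio, subtitles, destination ... */
hb_add(h, job);        // libhb keeps its own deep copy
hb_job_close(&job);    // safe: the queued copy is independent
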
index f374dbb505d941a2b8709ca2636f65bf2c652d32..f4b1bb81467bf379df0b8ab22bbdd729a2cd7609 100644 (file)
@@ -42,12 +42,13 @@ static json_t* hb_state_to_dict( hb_state_t * state)
     case HB_STATE_PAUSED:
     case HB_STATE_SEARCHING:
         dict = json_pack_ex(&error, 0,
-            "{s:o, s{s:o, s:o, s:o, s:o, s:o, s:o, s:o, s:o, s:o}}",
+            "{s:o, s{s:o, s:o, s:o, s:o, s:o, s:o, s:o, s:o, s:o, s:o}}",
             "State", json_integer(state->state),
             "Working",
                 "Progress",     json_real(state->param.working.progress),
-                "Job",          json_integer(state->param.working.job_cur),
-                "JobCount",     json_integer(state->param.working.job_count),
+                "PassID",       json_integer(state->param.working.pass_id),
+                "Pass",         json_integer(state->param.working.pass),
+                "PassCount",    json_integer(state->param.working.pass_count),
                 "Rate",         json_real(state->param.working.rate_cur),
                 "RateAvg",      json_real(state->param.working.rate_avg),
                 "Hours",        json_integer(state->param.working.hours),
@@ -217,6 +218,11 @@ static json_t* hb_title_to_dict( const hb_title_t * title )
         json_object_set_new(meta_dict, "LongDescription",
                             json_string(title->metadata->long_description));
     }
+    if (title->metadata->release_date != NULL)
+    {
+        json_object_set_new(meta_dict, "ReleaseDate",
+                            json_string(title->metadata->release_date));
+    }
 
     // process chapter list
     json_t * chapter_list = json_array();
@@ -554,6 +560,11 @@ char* hb_job_to_json( const hb_job_t * job )
         json_object_set_new(meta_dict, "LongDescription",
                             json_string(job->metadata->long_description));
     }
+    if (job->metadata->release_date != NULL)
+    {
+        json_object_set_new(meta_dict, "ReleaseDate",
+                            json_string(job->metadata->release_date));
+    }
 
     // process chapter list
     json_t *chapter_list = json_object_get(dest_dict, "ChapterList");
@@ -641,8 +652,7 @@ char* hb_job_to_json( const hb_job_t * job )
         else
         {
             subtitle_dict = json_pack_ex(&error, 0,
-            "{s:o, s:o, s:o, s:o, s:o, s:o}",
-                "ID",       json_integer(subtitle->id),
+            "{s:o, s:o, s:o, s:o, s:o}",
                 "Track",    json_integer(subtitle->track),
                 "Default",  json_boolean(subtitle->config.default_track),
                 "Force",    json_boolean(subtitle->config.force),
@@ -667,6 +677,40 @@ static int *        unpack_b(int *b)        { return b; }
 static char**       unpack_s(char **s)      { return s; }
 static json_t**     unpack_o(json_t** o)    { return o; }
 
+void hb_json_job_scan( hb_handle_t * h, const char * json_job )
+{
+    json_t * dict;
+    int result;
+    json_error_t error;
+
+    dict = json_loads(json_job, 0, NULL);
+
+    int title_index;
+    char *path = NULL;
+
+    result = json_unpack_ex(dict, &error, 0, "{s:{s:s, s:i}}",
+                            "Source",
+                                "Path",  unpack_s(&path),
+                                "Title", unpack_i(&title_index));
+    if (result < 0)
+    {
+        hb_error("json unpack failure, failed to find title: %s", error.text);
+        return;
+    }
+
+    hb_scan(h, path, title_index, 10, 0, 0);
+
+    // Wait for scan to complete
+    hb_state_t state;
+    do
+    {
+        hb_snooze(50);
+        hb_get_state2(h, &state);
+    } while (state.state == HB_STATE_SCANNING);
+
+    json_decref(dict);
+}
+
 /**
  * Convert a json string representation of a job to an hb_job_t
  * @param h        - Pointer to the hb_hanle_t hb instance which contains the
@@ -1084,12 +1128,10 @@ char* hb_job_init_json(hb_handle_t *h, int title_index)
  */
 int hb_add_json( hb_handle_t * h, const char * json_job )
 {
-    hb_job_t *job = hb_json_to_job(h, json_job);
-    if (job == NULL)
-        return -1;
+    hb_job_t job;
 
-    hb_add(h, job);
-    hb_job_close(&job);
+    job.json = json_job;
+    hb_add(h, &job);
 
     return 0;
 }
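
hb_add_json() now defers everything: it queues a minimal hb_job_t that carries only the JSON string, and the title scan plus expansion into a full job happen later in the work loop (see hb_json_job_scan() above and the work thread changes further down). A frontend only needs to poll the state for the new pass fields; a minimal sketch:

#include <stdio.h>
#include "hb.h"

// Hypothetical poller a frontend might run on a timer after hb_start().
static void report_progress(hb_handle_t *h)
{
    hb_state_t s;
    hb_get_state2(h, &s);
    if (s.state == HB_STATE_WORKING)
    {
        fprintf(stderr, "pass %d of %d (pass_id %d): %.2f %%\n",
                s.param.working.pass, s.param.working.pass_count,
                s.param.working.pass_id, 100.0 * s.param.working.progress);
    }
}
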
index fcd1683cec5de08141360933bfe4f3865b99fdc7..b5025a6c28dcac58ed108bab53fe14e58fda035b 100644 (file)
@@ -28,6 +28,7 @@ hb_image_t * hb_json_to_image(char *json_image);
 char       * hb_get_preview_params_json(int title_idx, int preview_idx,
                             int deinterlace, hb_geometry_settings_t *settings);
 char       * hb_get_preview_json(hb_handle_t * h, const char *json_param);
+void         hb_json_job_scan( hb_handle_t * h, const char * json_job );
 
 #ifdef __cplusplus
 }
index 13d12be29078d660d842b0dbf52deed1e0534687..759f1beb89deb9f29a1370ef96eca9857ad6d81d 100644 (file)
@@ -40,6 +40,7 @@ void         hb_title_close( hb_title_t ** );
  **********************************************************************/
 int  hb_get_pid( hb_handle_t * );
 void hb_set_state( hb_handle_t *, hb_state_t * );
+void hb_job_setup_passes(hb_handle_t *h, hb_job_t *job, hb_list_t *list_pass);
 
 /***********************************************************************
  * fifo.c
index 44993c27ca508ad8dd44287ed3fa040ba02473df..fb6326d1d2300233aaa883b65c3ae4db678b054f 100644 (file)
@@ -352,7 +352,7 @@ static int avformatInit( hb_mux_object_t * m )
     track->st->disposition |= AV_DISPOSITION_DEFAULT;
 
     hb_rational_t vrate;
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         hb_interjob_t * interjob = hb_interjob_get( job->h );
         vrate = interjob->vrate;
index c66bf00121c1ad48b735f937a95d98ced41916ce..ca9502e4c83ec81e92e8c574a13b229cb91f8f17 100644 (file)
@@ -373,7 +373,8 @@ static int muxWork( hb_work_object_t * w, hb_buffer_t ** buf_in,
         hb_bitvec_set(mux->eof, pv->track);
         hb_bitvec_set(mux->rdy, pv->track);
     }
-    else if ((job->pass != 0 && job->pass != 2) ||
+    else if ((job->pass_id != HB_PASS_ENCODE &&
+              job->pass_id != HB_PASS_ENCODE_2ND) ||
              hb_bitvec_bit(mux->eof, pv->track))
     {
         hb_buffer_close( &buf );
@@ -471,7 +472,8 @@ void muxClose( hb_work_object_t * w )
         // may initiate optimization which can take a while and
         // we want the muxing state to be visible while this is
         // happening.
-        if( job->pass == 0 || job->pass == 2 )
+        if( job->pass_id == HB_PASS_ENCODE ||
+            job->pass_id == HB_PASS_ENCODE_2ND )
         {
             /* Update the UI */
             hb_state_t state;
@@ -487,7 +489,8 @@ void muxClose( hb_work_object_t * w )
         }
 
         // we're all done muxing -- print final stats and cleanup.
-        if( job->pass == 0 || job->pass == 2 )
+        if( job->pass_id == HB_PASS_ENCODE ||
+            job->pass_id == HB_PASS_ENCODE_2ND )
         {
             hb_stat_t sb;
             uint64_t bytes_total, frames_total;
@@ -612,7 +615,7 @@ hb_work_object_t * hb_muxer_init( hb_job_t * job )
     mux->pts = mux->interleave;
 
     /* Get a real muxer */
-    if( job->pass == 0 || job->pass == 2)
+    if( job->pass_id == HB_PASS_ENCODE || job->pass_id == HB_PASS_ENCODE_2ND )
     {
         switch( job->mux )
         {
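
The muxer-side checks now spell out which passes actually produce output. The repeated condition amounts to a simple predicate (the helper name is illustrative, not part of libhb):

// Only the single-pass encode and the second pass of a two-pass encode mux output.
static int is_output_pass(int pass_id)
{
    return pass_id == HB_PASS_ENCODE || pass_id == HB_PASS_ENCODE_2ND;
}
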
index 80e9679594f99fd4ae8108528502f71d37026ad4..530196e4881506234b34f5c04b05cb3f10a0726f 100644 (file)
@@ -751,6 +751,7 @@ static void UpdateState( hb_work_private_t  * r, int64_t start)
         r->st_first = now;
     }
 
+    hb_get_state2(r->job->h, &state);
 #define p state.param.working
     if ( !r->job->indepth_scan )
     {
index 55c9bfc909d03b9966d32ddcf59a2afcf63d094e..32a5c60bb2533a6146efc9d6b40f8807e683bd69 100644 (file)
@@ -74,7 +74,19 @@ hb_thread_t * hb_scan_init( hb_handle_t * handle, volatile int * die,
     data->preview_count  = preview_count;
     data->store_previews = store_previews;
     data->min_title_duration = min_duration;
-    
+
+    // Initialize scan state
+    hb_state_t state;
+#define p state.param.scanning
+    state.state   = HB_STATE_SCANNING;
+    p.title_cur   = 1;
+    p.title_count = 1;
+    p.preview_cur = 0;
+    p.preview_count = 1;
+    p.progress = 0.0;
+#undef p
+    hb_set_state(handle, &state);
+
     return hb_thread_init( "scan", ScanFunc, data, HB_NORMAL_PRIORITY );
 }
 
@@ -169,7 +181,14 @@ static void ScanFunc( void * _data )
     }
     else
     {
-        data->title_index = 1;
+        // Title index 0 is not a valid title number and means scan all titles.
+        // So set title index to 1 in this scenario.
+        //
+        // Otherwise, set title index in new title to the index that was
+        // requested.  This preserves the original index created in batch
+        // mode.
+        if (data->title_index == 0)
+            data->title_index = 1;
         hb_title_t * title = hb_title_init( data->path, data->title_index );
         if ( (data->stream = hb_stream_open( data->path, title, 1 ) ) != NULL )
         {
index 81d8da2fc3b11147c330229e9591558069771cb4..9d01c1d00af7c5eb579a15d6d892a2726e978cd1 100644 (file)
@@ -1039,7 +1039,6 @@ hb_title_t * hb_stream_title_scan(hb_stream_t *stream, hb_title_t * title)
 
     // 'Barebones Title'
     title->type = HB_STREAM_TYPE;
-    title->index = 1;
 
     // Copy part of the stream path to the title name
     char *sep = hb_strr_dir_sep(stream->path);
@@ -5428,7 +5427,6 @@ static hb_title_t *ffmpeg_title_scan( hb_stream_t *stream, hb_title_t *title )
 
     // 'Barebones Title'
     title->type = HB_FF_STREAM_TYPE;
-    title->index = 1;
 
     // Copy part of the stream path to the title name
     char *sep = hb_strr_dir_sep(stream->path);
index 21fe8c2ae2a2d6ac2aedda8e33b2600be5f78adf..ac826584e3b22b4746d9c309998ad0d3b96a71f6 100644 (file)
@@ -155,7 +155,7 @@ hb_work_object_t * hb_sync_init( hb_job_t * job )
     pv->common->pts_offset   = INT64_MIN;
     sync->first_frame = 1;
 
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         /* We already have an accurate frame count from pass 1 */
         hb_interjob_t * interjob = hb_interjob_get( job->h );
@@ -263,7 +263,7 @@ void syncVideoClose( hb_work_object_t * w )
             pv->common->count_frames, sync->count_frames_max );
 
     /* save data for second pass */
-    if( job->pass == 1 )
+    if( job->pass_id == HB_PASS_ENCODE_1ST )
     {
         /* Preserve frame count for better accuracy in pass 2 */
         hb_interjob_t * interjob = hb_interjob_get( job->h );
@@ -1638,6 +1638,7 @@ static void UpdateState( hb_work_object_t * w )
     hb_sync_video_t   * sync = &pv->type.video;
     hb_state_t state;
 
+    hb_get_state2( pv->job->h, &state );
     if( !pv->common->count_frames )
     {
         sync->st_first = hb_get_date();
index 4792c42b33988484f159f464ca54ae3d6ff341df..01060c01ff19e78e5da71077978c34d4310decb6 100644 (file)
@@ -58,18 +58,21 @@ hb_thread_t * hb_work_init( hb_list_t * jobs, volatile int * die, hb_error_code
     return hb_thread_init( "work", work_func, work, HB_LOW_PRIORITY );
 }
 
-static void InitWorkState( hb_handle_t * h )
+static void InitWorkState(hb_handle_t *h, int pass_id, int pass, int pass_count)
 {
     hb_state_t state;
 
-    state.state = HB_STATE_WORKING;
+    state.state  = HB_STATE_WORKING;
 #define p state.param.working
-    p.progress  = 0.0;
-    p.rate_cur  = 0.0;
-    p.rate_avg  = 0.0;
-    p.hours     = -1;
-    p.minutes   = -1;
-    p.seconds   = -1; 
+    p.pass_id    = pass_id;
+    p.pass       = pass;
+    p.pass_count = pass_count;
+    p.progress   = 0.0;
+    p.rate_cur   = 0.0;
+    p.rate_avg   = 0.0;
+    p.hours      = -1;
+    p.minutes    = -1;
+    p.seconds    = -1; 
 #undef p
 
     hb_set_state( h, &state );
@@ -90,12 +93,45 @@ static void work_func( void * _work )
     while( !*work->die && ( job = hb_list_item( work->jobs, 0 ) ) )
     {
         hb_list_rem( work->jobs, job );
-        job->die = work->die;
-        job->done_error = work->error;
-        *(work->current_job) = job;
-        InitWorkState( job->h );
-        do_job( job );
-        *(work->current_job) = NULL;
+        hb_list_t * passes = hb_list_init();
+
+        // JSON jobs get special treatment.  We want to perform the title
+        // scan for the JSON job automatically.  This requires that we delay
+        // filling the job struct till we have performed the title scan
+        // because the default values for the job come from the title.
+        if (job->json != NULL)
+        {
+            // Perform title scan for json job
+            hb_json_job_scan(job->h, job->json);
+
+            // Expand json string to full job struct
+            hb_job_t *new_job = hb_json_to_job(job->h, job->json);
+            new_job->h = job->h;
+            hb_job_close(&job);
+            job = new_job;
+        }
+        hb_job_setup_passes(job->h, job, passes);
+        hb_job_close(&job);
+
+        int pass_count, pass;
+        pass_count = hb_list_count(passes);
+        for (pass = 0; pass < pass_count && !*work->die; pass++)
+        {
+            job = hb_list_item(passes, pass);
+            job->die = work->die;
+            job->done_error = work->error;
+            *(work->current_job) = job;
+            InitWorkState(job->h, job->pass_id, pass + 1, pass_count);
+            do_job( job );
+            *(work->current_job) = NULL;
+        }
+        // Clean up any incomplete jobs
+        for (; pass < pass_count; pass++)
+        {
+            job = hb_list_item(passes, pass);
+            hb_job_close(&job);
+        }
+        hb_list_close(&passes);
     }
 
     free( work );
@@ -346,8 +382,8 @@ void hb_display_job_info(hb_job_t *job)
         }
         else
         {
-            hb_log( "     + bitrate: %d kbps, pass: %d", job->vbitrate, job->pass );
-            if(job->pass == 1 && job->fastfirstpass == 1 &&
+            hb_log( "     + bitrate: %d kbps, pass: %d", job->vbitrate, job->pass_id );
+            if(job->pass_id == HB_PASS_ENCODE_1ST && job->fastfirstpass == 1 &&
                (job->vcodec == HB_VCODEC_X264 || job->vcodec == HB_VCODEC_X265))
             {
                 hb_log( "     + fast first pass" );
@@ -538,7 +574,7 @@ static void do_job(hb_job_t *job)
     title = job->title;
     interjob = hb_interjob_get( job->h );
 
-    if( job->pass == 2 )
+    if( job->pass_id == HB_PASS_ENCODE_2ND )
     {
         correct_framerate( job );
     }
@@ -606,7 +642,8 @@ static void do_job(hb_job_t *job)
          * first burned subtitle (explicitly or after sanitizing) - which should
          * ensure that it doesn't get dropped. */
         interjob->select_subtitle->out_track = 1;
-        if (job->pass == 0 || job->pass == 2)
+        if (job->pass_id == HB_PASS_ENCODE ||
+            job->pass_id == HB_PASS_ENCODE_2ND)
         {
             // final pass, interjob->select_subtitle is no longer needed
             hb_list_insert(job->list_subtitle, 0, interjob->select_subtitle);
@@ -1463,8 +1500,8 @@ static void do_job(hb_job_t *job)
 
     hb_handle_t * h = job->h;
     hb_state_t state;
-    hb_get_state( h, &state );
-    
+    hb_get_state2( h, &state );
+
     hb_log("work: average encoding speed for job is %f fps", state.param.working.rate_avg);
 
     job->done = 1;
index f3ec9fcf674c91cd5e29a2479e80634e0d513334..89e40e02fc1c7cdc9bc64b617a4c6e60d3687494 100644 (file)
                  case HBStateWorking:
                  {
                      NSString *pass_desc = @"";
-                     if (p.job_cur == 1 && p.job_count > 1)
+                     if (p.pass_id == HB_PASS_SUBTITLE && p.pass_count > 1)
                      {
                          if ([self.currentJob.subtitles.tracks.firstObject[keySubTrackIndex] intValue] == -1)
                          {
                          string = [NSMutableString stringWithFormat:
                                    NSLocalizedString(@"Encoding: %@ \nPass %d %@ of %d, %.2f %%", @""),
                                    self.currentJob.destURL.lastPathComponent,
-                                   p.job_cur, pass_desc, p.job_count, 100.0 * p.progress];
+                                   p.pass, pass_desc, p.pass_count, 100.0 * p.progress];
                      }
                      else
                      {
                          string = [NSMutableString stringWithFormat:
                                    NSLocalizedString(@"Encoding: %@ \nPass %d of %d, %.2f %%", @""),
                                    self.currentJob.destURL.lastPathComponent,
-                                   p.job_cur, p.job_count, 100.0 * p.progress];
+                                   p.pass, p.pass_count, 100.0 * p.progress];
                      }
 
                      if (p.seconds > -1)
                          }
                      }
 
-                     progress = (p.progress + p.job_cur - 1) / p.job_count;
+                     progress = (p.progress + p.pass - 1) / p.pass_count;
 
                      // Update dock icon
                      if (self.dockIconProgress < 100.0 * progress)
index ee9a0d1f720c7a03b8f193cac035100223ac7cb8..bc66701939be373d88c1a525365b43ace4b41458 100644 (file)
@@ -2824,7 +2824,7 @@ static int HandleEvents( hb_handle_t * h )
 #define p s.param.working
         case HB_STATE_SEARCHING:
             fprintf( stdout, "\rEncoding: task %d of %d, Searching for start time, %.2f %%",
-                     p.job_cur, p.job_count, 100.0 * p.progress );
+                     p.pass, p.pass_count, 100.0 * p.progress );
             if( p.seconds > -1 )
             {
                 fprintf( stdout, " (ETA %02dh%02dm%02ds)", 
@@ -2835,7 +2835,7 @@ static int HandleEvents( hb_handle_t * h )
 
         case HB_STATE_WORKING:
             fprintf( stdout, "\rEncoding: task %d of %d, %.2f %%",
-                     p.job_cur, p.job_count, 100.0 * p.progress );
+                     p.pass, p.pass_count, 100.0 * p.progress );
             if( p.seconds > -1 )
             {
                 fprintf( stdout, " (%.2f fps, avg %.2f fps, ETA "