status->state &= ~GHB_STATE_SEARCHING;
}
#undef p
-#define p state->param.workdone
+#define p state->param.working
if (state->state & HB_STATE_WORKDONE)
{
status->state |= GHB_STATE_WORKDONE;
status->state &= ~GHB_STATE_PAUSED;
status->state &= ~GHB_STATE_WORKING;
status->state &= ~GHB_STATE_SEARCHING;
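+ // Record the finished job's sequence_id in the frontend status.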
+ status->unique_id = p.sequence_id;
switch (p.error)
{
case HB_ERROR_NONE:
struct
{
- /* HB_STATE_WORKING */
+ /* HB_STATE_WORKING || HB_STATE_SEARCHING || HB_STATE_WORKDONE */
#define HB_PASS_SUBTITLE -1
#define HB_PASS_ENCODE 0
#define HB_PASS_ENCODE_1ST 1 // Some code depends on these values being
int minutes;
int seconds;
int sequence_id;
- } working;
-
- struct
- {
- /* HB_STATE_WORKDONE */
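+ /* Only set when state is HB_STATE_WORKDONE */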
hb_error_code error;
- } workdone;
+ } working;
struct
{
{
hb_thread_close( &h->work_thread );
- hb_log( "libhb: work result = %d",
- h->work_error );
+ hb_log( "libhb: work result = %d", h->work_error );
hb_lock( h->state_lock );
- h->state.state = HB_STATE_WORKDONE;
- h->state.param.workdone.error = h->work_error;
+ h->state.state = HB_STATE_WORKDONE;
+ h->state.param.working.error = h->work_error;
hb_unlock( h->state_lock );
}
break;
case HB_STATE_WORKDONE:
dict = json_pack_ex(&error, 0,
- "{s:o, s{s:o}}",
+ "{s:o, s{s:o, s:o}}",
"State", hb_value_string(state_s),
"WorkDone",
- "Error", hb_value_int(state->param.workdone.error));
+ "SequenceID", hb_value_int(state->param.working.sequence_id),
+ "Error", hb_value_int(state->param.working.error));
break;
case HB_STATE_MUXING:
dict = json_pack_ex(&error, 0,
}
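+/**
+ * Signals that all job passes are done by setting the handle state to
+ * HB_STATE_WORKDONE with the job's error code and sequence_id.
+ * @param job The job whose result is being reported.
+ */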
+static void SetWorkdoneState(hb_job_t *job)
+{
+ hb_state_t state;
+
+ if (job == NULL)
+ {
+ return;
+ }
+ hb_get_state2(job->h, &state);
+
+ state.state = HB_STATE_WORKDONE;
+ state.param.working.error = *job->done_error;
+ state.param.working.sequence_id = job->sequence_id;
+
+ hb_set_state(job->h, &state);
+}
+
/**
* Iterates through job list and calls do_job for each job.
* @param _work Handle work object.
do_job( job );
*(work->current_job) = NULL;
}
- // Clean up any incomplete jobs
- for (; pass < pass_count; pass++)
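+ // Post the final WORKDONE state, including error and sequence_id,
+ // before the job structures are freed below.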
+ SetWorkdoneState(job);
+
+ // Clean up all job passes
+ for (pass = 0; pass < pass_count; pass++)
{
job = hb_list_item(passes, pass);
hb_job_close(&job);
}
hb_buffer_pool_free();
-
- hb_job_close(&job);
}
static inline void copy_chapter( hb_buffer_t * dst, hb_buffer_t * src )
}
HBCoreResult result = HBCoreResultDone;
- switch (_hb_state->param.workdone.error)
+ switch (_hb_state->param.working.error)
{
case HB_ERROR_NONE:
result = HBCoreResultDone;
}
#undef p
-#define p s.param.workdone
+#define p s.param.working
case HB_STATE_WORKDONE:
/* Print error if any, then exit */
if (json)