pv->pp7_src = (uint8_t*)malloc( pv->pp7_temp_stride*(h+8)*sizeof(uint8_t) );
- int buf_size = 3 * width * height / 2;
- pv->buf_out = hb_buffer_init( buf_size );
+ pv->buf_out = hb_video_buffer_init( width, height );
return pv;
}
w = pv->job->title->width;
h = pv->job->title->height;
}
- hb_buffer_t *buf = hb_buffer_init( w * h * 3 / 2 );
+ hb_buffer_t *buf = hb_video_buffer_init( w, h );
uint8_t *dst = buf->data;
if ( context->pix_fmt != PIX_FMT_YUV420P || w != context->width ||
else
{
dst = copy_plane( dst, frame->data[0], w, frame->linesize[0], h );
- w >>= 1; h >>= 1;
+ w = (w + 1) >> 1; h = (h + 1) >> 1;
dst = copy_plane( dst, frame->data[1], w, frame->linesize[1], h );
dst = copy_plane( dst, frame->data[2], w, frame->linesize[2], h );
}
dst_h = job->title->height;
}
int dst_wh = dst_w * dst_h;
- hb_buffer_t *buf = hb_buffer_init( dst_wh + ( dst_wh >> 1 ) );
+ hb_buffer_t *buf = hb_video_buffer_init( dst_w, dst_h );
if ( dst_w != width || dst_h != height )
{
pv->width[1] = pv->width[2] = width >> 1;
pv->height[1] = pv->height[2] = height >> 1;
- int buf_size = 3 * width * height / 2;
- pv->buf_out[0] = hb_buffer_init( buf_size );
- pv->buf_out[1] = hb_buffer_init( buf_size );
+ pv->buf_out[0] = hb_video_buffer_init( width, height );
+ pv->buf_out[1] = hb_video_buffer_init( width, height );
pv->buf_settings = hb_buffer_init( 0 );
pv->yadif_deinterlaced_frames = 0;
pv->width[1] = pv->width[2] = width >> 1;
pv->height[1] = pv->height[2] = height >> 1;
- int buf_size = 3 * width * height / 2;
- pv->buf_out[0] = hb_buffer_init( buf_size );
- pv->buf_out[1] = hb_buffer_init( buf_size );
+ pv->buf_out[0] = hb_video_buffer_init( width, height );
+ pv->buf_out[1] = hb_video_buffer_init( width, height );
pv->buf_settings = hb_buffer_init( 0 );
pv->yadif_ready = 0;
hqdn3d_precalc_coef( pv->hqdn3d_coef[2], spatial_chroma );
hqdn3d_precalc_coef( pv->hqdn3d_coef[3], temporal_chroma );
- int buf_size = 3 * width * height / 2;
- pv->buf_out = hb_buffer_init( buf_size );
+ pv->buf_out = hb_video_buffer_init( width, height );
return pv;
}
pv->width[1] = pv->width[2] = width >> 1;
pv->height[1] = pv->height[2] = height >> 1;
- int buf_size = 3 * width * height / 2;
- pv->buf_out = hb_buffer_init( buf_size );
+ pv->buf_out = hb_video_buffer_init( width, height );
struct pullup_context * ctx;
pv->pullup_ctx = ctx = pullup_alloc_context();
frame->quality = pv->context->global_quality;
/* Should be way too large */
- buf = hb_buffer_init( 3 * job->width * job->height / 2 );
+ buf = hb_video_buffer_init( job->width, job->height );
buf->size = avcodec_encode_video( pv->context, buf->data, buf->alloc,
frame );
buf->start = in->start;
yuv.y_height = job->height;
yuv.y_stride = job->width;
- yuv.uv_width = job->width / 2;
- yuv.uv_height = job->height / 2;
- yuv.uv_stride = job->width / 2;
+ yuv.uv_width = (job->width + 1) / 2;
+ yuv.uv_height = (job->height + 1) / 2;
+ yuv.uv_stride = yuv.uv_width;
yuv.y = in->data;
yuv.u = in->data + job->width * job->height;
- yuv.v = in->data + job->width * job->height * 5/4;
+ /* V plane follows U: skip the full Y plane (w*h bytes), then one
+ * chroma plane of uv_width*uv_height bytes. The pre-fix version
+ * omitted the Y-plane offset, pointing V into the Y data. */
+ yuv.v = yuv.u + yuv.uv_width * yuv.uv_height;
theora_encode_YUVin(&pv->theora, &yuv);
x264_picture_alloc( &pv->pic_in, X264_CSP_I420,
job->width, job->height );
+ pv->pic_in.img.i_stride[2] = pv->pic_in.img.i_stride[1] = ( ( job->width + 1 ) >> 1 );
pv->x264_allocated_pic = pv->pic_in.img.plane[0];
if (job->areBframes)
hb_job_t *job = pv->job;
/* Should be way too large */
- buf = hb_buffer_init( 3 * job->width * job->height / 2 );
+ buf = hb_video_buffer_init( job->width, job->height );
buf->size = 0;
buf->frametype = 0;
/* Point x264 at our current buffers Y(UV) data. */
pv->pic_in.img.plane[0] = in->data;
+ int uvsize = ( (job->width + 1) >> 1 ) * ( (job->height + 1) >> 1 );
if( job->grayscale )
{
/* XXX x264 has currently no option for grayscale encoding */
- memset( pv->pic_in.img.plane[1], 0x80, job->width * job->height / 4 );
- memset( pv->pic_in.img.plane[2], 0x80, job->width * job->height / 4 );
+ memset( pv->pic_in.img.plane[1], 0x80, uvsize );
+ memset( pv->pic_in.img.plane[2], 0x80, uvsize );
}
else
{
/* Point x264 at our buffers (Y)UV data */
pv->pic_in.img.plane[1] = in->data + job->width * job->height;
- pv->pic_in.img.plane[2] = in->data + 5 * job->width * job->height / 4;
+ pv->pic_in.img.plane[2] = pv->pic_in.img.plane[1] + uvsize;
}
if( in->new_chap && job->chapter_markers )
{
}
/* Should be way too large */
- buf = hb_buffer_init( 3 * job->width * job->height / 2 );
+ buf = hb_video_buffer_init( job->width, job->height );
buf->start = in->start;
buf->stop = in->stop;
void hb_fifo_push_head( hb_fifo_t *, hb_buffer_t * );
void hb_fifo_close( hb_fifo_t ** );
+// this routine gets a buffer for an uncompressed YUV420 video frame
+// with dimensions width x height.
+// NOTE(review): the size arithmetic is done in (signed) int, so it
+// assumes width*height stays well below INT_MAX — presumably title
+// dimensions are validated upstream; confirm before relying on this
+// with untrusted dimensions.
+static inline hb_buffer_t * hb_video_buffer_init( int width, int height )
+{
+ // Y requires w x h bytes. U & V each require (w+1)/2 x
+ // (h+1)/2 bytes (the "+1" is to round up). We shift rather
+ // than divide by 2 since the compiler can't know these ints
+ // are positive so it generates very expensive integer divides
+ // if we do "/2". The code here matches the calculation for
+ // PIX_FMT_YUV420P in ffmpeg's avpicture_fill() which is required
+ // for most of HB's filters to work right.
+ return hb_buffer_init( width * height + ( ( width+1 ) >> 1 ) *
+ ( ( height+1 ) >> 1 ) * 2 );
+}
+
/***********************************************************************
* Threads: update.c, scan.c, work.c, reader.c, muxcommon.c
**********************************************************************/
*/
static uint8_t *getU(uint8_t *data, int width, int height, int x, int y)
{
- return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height)]);
+ return(&data[(y>>1) * ((width+1)>>1) + (x>>1) + width*height]);
}
static uint8_t *getV(uint8_t *data, int width, int height, int x, int y)
{
- return(&data[(((y/2) * (width/2)) + (x/2)) + (width*height) +
- (width*height)/4]);
+ int w2 = (width+1) >> 1, h2 = (height+1) >> 1;
+ return(&data[(y>>1) * w2 + (x>>1) + width*height + w2*h2]);
}
static void ApplySub( hb_job_t * job, hb_buffer_t * buf,
}
/* Setup render buffer */
- hb_buffer_t * buf_render = hb_buffer_init( 3 * job->width * job->height / 2 );
+ hb_buffer_t * buf_render = hb_video_buffer_init( job->width, job->height );
/* Apply filters */
if( job->filters )