summaryrefslogtreecommitdiff
path: root/libavcodec/cuvid.c
diff options
context:
space:
mode:
authorPhilip Langdale <philipl@overt.org>2016-09-03 12:52:52 -0700
committerPhilip Langdale <philipl@overt.org>2016-09-22 18:39:46 -0700
commit289a6bb8b11822aaea1b174d5d938a081e19a084 (patch)
tree4b9c2abcb805e14ac6c69a249525cd350d0a7f84 /libavcodec/cuvid.c
parent843aff3cf7ad1d1f1549b9c1d3892589ba1cfdda (diff)
downloadffmpeg-289a6bb8b11822aaea1b174d5d938a081e19a084.tar.gz
cuvid: Pass bit depth information to decoder
Although cuvid can only output 8bit, it can consume HEVC Main10 if the bit depth is set properly. In cases where >8bit is not supported, this change is still beneficial as the decoder will fail to be created instead of plowing through and decoding as 8bit.
Diffstat (limited to 'libavcodec/cuvid.c')
-rw-r--r--libavcodec/cuvid.c2
1 file changed, 2 insertions, 0 deletions
diff --git a/libavcodec/cuvid.c b/libavcodec/cuvid.c
index 2570848d0f..ddf8b72efe 100644
--- a/libavcodec/cuvid.c
+++ b/libavcodec/cuvid.c
@@ -181,6 +181,7 @@ static int CUDAAPI cuvid_handle_video_sequence(void *opaque, CUVIDEOFORMAT* form
cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
cuinfo.ulNumOutputSurfaces = 1;
cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+ cuinfo.bitDepthMinus8 = format->bit_depth_luma_minus8;
if (format->progressive_sequence) {
ctx->deint_mode = cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
@@ -573,6 +574,7 @@ static int cuvid_test_dummy_decoder(AVCodecContext *avctx, CUVIDPARSERPARAMS *cu
cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
cuinfo.ulNumOutputSurfaces = 1;
cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+ cuinfo.bitDepthMinus8 = 0;
cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;