author    Nicolas Dufresne <nicolas.dufresne@collabora.com>  2016-01-08 16:16:09 -0500
committer Nicolas Dufresne <nicolas.dufresne@collabora.com>  2016-01-21 13:49:18 -0500
commit    09dbc5b2989f414f7921e39045cc7e97146178b6
tree      0ed9414095de9f1e5ff8f1decf34faf5d52f83b0
parent    9a53d798765b54ff8e9e5acb25b13d4a3bc22edf
download  gstreamer-plugins-bad-09dbc5b2989f414f7921e39045cc7e97146178b6.tar.gz
ahcsrc: Fix latency reporting
The min/max latency was wrongly reported as the shortest and longest possible frame duration. This is not how latency works in GStreamer. Fix this by reporting the min latency as the longest possible duration of one frame. As we don't know how many buffers the stack can accumulate, we simply assume that the max latency is the same (the usual default behaviour).
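For illustration, the general shape of such a latency answer in a live source is sketched below. This is a minimal example, not the ahcsrc code itself; the helper name handle_latency_query is hypothetical, and the fps-scaled-by-1000 convention for fps_min is an assumption carried over from the Android camera API this element wraps.

    #include <gst/gst.h>

    /* Sketch: answer a GST_QUERY_LATENCY for a live camera source.
     * fps_min is assumed to be frames-per-second scaled by 1000,
     * following the Android camera convention. */
    static gboolean
    handle_latency_query (GstQuery * query, gint fps_min)
    {
      /* The longest a single frame can take is one frame period at the
       * lowest framerate; report that as the minimum latency. */
      GstClockTime min = gst_util_uint64_scale (GST_SECOND, 1000, fps_min);

      /* The depth of the capture stack's buffer queue is unknown, so
       * report max == min, the usual default for such sources. */
      gst_query_set_latency (query, TRUE, min, min);

      return TRUE;
    }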
Diffstat (limited to 'sys')
 sys/androidmedia/gstahcsrc.c | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)
diff --git a/sys/androidmedia/gstahcsrc.c b/sys/androidmedia/gstahcsrc.c
index 127060036..bb8728981 100644
--- a/sys/androidmedia/gstahcsrc.c
+++ b/sys/androidmedia/gstahcsrc.c
@@ -2492,15 +2492,14 @@ gst_ahc_src_query (GstBaseSrc * bsrc, GstQuery * query)
 
   switch (GST_QUERY_TYPE (query)) {
     case GST_QUERY_LATENCY:{
-      GstClockTime min, max;
+      GstClockTime min;
 
-      gst_query_parse_latency (query, NULL, &min, &max);
-      min = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_max);
-      max = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_min);
+      /* Allow 1 frame of latency, based on the longest frame duration */
+      gst_query_parse_latency (query, NULL, &min, NULL);
+      min = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_min);
 
       GST_DEBUG_OBJECT (self,
-          "Reporting latency min: %" GST_TIME_FORMAT " max: %" GST_TIME_FORMAT,
-          GST_TIME_ARGS (min), GST_TIME_ARGS (max));
-      gst_query_set_latency (query, TRUE, min, max);
+          "Reporting latency min: %" GST_TIME_FORMAT, GST_TIME_ARGS (min));
+      gst_query_set_latency (query, TRUE, min, min);
       return TRUE;
       break;
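As a usage note, the values an element reports this way can be observed from application code by issuing a latency query against the element or the pipeline. The helper below is a sketch under that assumption; the function name print_latency is illustrative.

    #include <gst/gst.h>

    /* Query and print the live flag and min/max latency an element
     * (or a whole pipeline) reports. */
    static void
    print_latency (GstElement * element)
    {
      GstQuery *query = gst_query_new_latency ();

      if (gst_element_query (element, query)) {
        gboolean live;
        GstClockTime min, max;

        gst_query_parse_latency (query, &live, &min, &max);
        g_print ("live: %d min: %" GST_TIME_FORMAT " max: %" GST_TIME_FORMAT "\n",
            live, GST_TIME_ARGS (min), GST_TIME_ARGS (max));
      }

      gst_query_unref (query);
    }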