Diffstat (limited to 'sys/applemedia/avsamplevideosink.m')
 sys/applemedia/avsamplevideosink.m | 39 ++++++++++++++++++++++-----------------
 1 file changed, 22 insertions(+), 17 deletions(-)
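The patch below replaces direct Objective-C access to av_sink->layer with ARC bridging casts, so the layer can be kept in the sink struct as a plain gpointer. A minimal sketch of that ownership pattern, assuming an ARC-enabled translation unit (the 'owned' variable is illustrative, not taken from this file):

  /* Hand a +1 reference from ARC over to a C-visible gpointer... */
  AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
  gpointer owned = (__bridge_retained gpointer) layer;

  /* ...and later give that reference back to be released, here on the main
   * queue as the finalize hunk does. */
  dispatch_async (dispatch_get_main_queue (), ^{
    CFBridgingRelease (owned);
  });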
diff --git a/sys/applemedia/avsamplevideosink.m b/sys/applemedia/avsamplevideosink.m
index ce17e98f3..8e2d79ba1 100644
--- a/sys/applemedia/avsamplevideosink.m
+++ b/sys/applemedia/avsamplevideosink.m
@@ -156,11 +156,11 @@ static void
gst_av_sample_video_sink_finalize (GObject * object)
{
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object);
- __block AVSampleBufferDisplayLayer *layer = av_sink->layer;
+ __block gpointer layer = av_sink->layer;
if (layer) {
dispatch_async (dispatch_get_main_queue (), ^{
- [layer release];
+ CFBridgingRelease(layer);
});
}
@@ -198,19 +198,21 @@ gst_av_sample_video_sink_start (GstBaseSink * bsink)
GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink);
if ([NSThread isMainThread]) {
- av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
+ AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
+ av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
+ layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
} else {
dispatch_sync (dispatch_get_main_queue (), ^{
- av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init];
+ AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
+ av_sink->layer = (__bridge_retained gpointer)layer;
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
+ layer.videoGravity = AVLayerVideoGravityResize;
g_object_notify (G_OBJECT (av_sink), "layer");
});
}
@@ -224,7 +226,7 @@ _stop_requesting_data (GstAVSampleVideoSink * av_sink)
{
if (av_sink->layer) {
if (av_sink->layer_requesting_data)
- [av_sink->layer stopRequestingMediaData];
+ [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData];
av_sink->layer_requesting_data = FALSE;
}
}
@@ -243,7 +245,7 @@ gst_av_sample_video_sink_stop (GstBaseSink * bsink)
g_mutex_lock (&av_sink->render_lock);
_stop_requesting_data (av_sink);
g_mutex_unlock (&av_sink->render_lock);
- [av_sink->layer flushAndRemoveImage];
+ [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];
}
return TRUE;
@@ -661,11 +663,12 @@ _enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf)
kCFBooleanTrue);
}
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
if (av_sink->keep_aspect_ratio)
- av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect;
+ layer.videoGravity = AVLayerVideoGravityResizeAspect;
else
- av_sink->layer.videoGravity = AVLayerVideoGravityResize;
- [av_sink->layer enqueueSampleBuffer:sample_buf];
+ layer.videoGravity = AVLayerVideoGravityResize;
+ [layer enqueueSampleBuffer:sample_buf];
CFRelease (pbuf);
CFRelease (sample_buf);
@@ -678,13 +681,14 @@ _request_data (GstAVSampleVideoSink * av_sink)
{
av_sink->layer_requesting_data = TRUE;
- [av_sink->layer requestMediaDataWhenReadyOnQueue:
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
+ [layer requestMediaDataWhenReadyOnQueue:
dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
usingBlock:^{
while (TRUE) {
/* don't needlessly fill up avsamplebufferdisplaylayer's queue.
* This also allows us to skip displaying late frames */
- if (!av_sink->layer.readyForMoreMediaData)
+ if (!layer.readyForMoreMediaData)
break;
g_mutex_lock (&av_sink->render_lock);
@@ -752,9 +756,10 @@ gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf)
MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \
defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \
MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4
- if ([av_sink->layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
+ AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink);
+ if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) {
GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s",
- [[[av_sink->layer error] description] UTF8String]);
+ [[[layer error] description] UTF8String]);
return GST_FLOW_ERROR;
}
#endif
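The GST_AV_SAMPLE_VIDEO_SINK_LAYER() accessor used in these hunks is not defined in this file; presumably it bridges the stored gpointer back to the Objective-C type without transferring ownership, roughly along these lines (a sketch of the assumed header macro, not the actual definition):

  /* Assumed accessor: borrow (no ownership transfer) the layer kept in the
   * sink's gpointer field. */
  #define GST_AV_SAMPLE_VIDEO_SINK_LAYER(obj) \
      ((__bridge AVSampleBufferDisplayLayer *) (obj)->layer)

Call sites can then message the layer as usual, e.g.
  [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage];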