diff options
author | Nick Kallen <nickkallen@me.com> | 2017-02-03 14:46:39 +0100 |
---|---|---|
committer | Sebastian Dröge <sebastian@centricular.com> | 2017-03-07 13:37:07 +0200 |
commit | 46bbc60d24c1914380b4145fa8595aa2e958c7dc (patch) | |
tree | 85c98124552b6638d63b554a950c352c4003c573 /sys | |
parent | 201e71c3aacd4c61771fd6ac20dc39e43e42bb0c (diff) | |
download | gstreamer-plugins-bad-46bbc60d24c1914380b4145fa8595aa2e958c7dc.tar.gz |
applemedia/gl: Update code to use ARC
All code interacting with Objective-C objects should now use Automatic
Reference Counting rather than manual memory management or Garbage
Collection. Because ARC prohibits C-structs from containing
references to Objective-C objects, all such fields are now typed
'gpointer'. Setting and getting Objective-C fields on such a
struct now uses explicit __bridge_* calls to tell ARC about
object lifetimes.
https://bugzilla.gnome.org/show_bug.cgi?id=777847
Diffstat (limited to 'sys')
-rw-r--r-- | sys/applemedia/Makefile.am | 1 | ||||
-rw-r--r-- | sys/applemedia/avfassetsrc.h | 11 | ||||
-rw-r--r-- | sys/applemedia/avfassetsrc.m | 108 | ||||
-rw-r--r-- | sys/applemedia/avfvideosrc.h | 4 | ||||
-rw-r--r-- | sys/applemedia/avfvideosrc.m | 62 | ||||
-rw-r--r-- | sys/applemedia/avsamplevideosink.h | 6 | ||||
-rw-r--r-- | sys/applemedia/avsamplevideosink.m | 39 | ||||
-rw-r--r-- | sys/applemedia/iosassetsrc.h | 15 | ||||
-rw-r--r-- | sys/applemedia/iosassetsrc.m | 74 | ||||
-rw-r--r-- | sys/applemedia/plugin.m | 1 | ||||
-rw-r--r-- | sys/applemedia/videotexturecache.m | 9 |
11 files changed, 126 insertions, 204 deletions
diff --git a/sys/applemedia/Makefile.am b/sys/applemedia/Makefile.am index d77e47c7b..c2720cf50 100644 --- a/sys/applemedia/Makefile.am +++ b/sys/applemedia/Makefile.am @@ -35,6 +35,7 @@ endif libgstapplemedia_la_OBJCFLAGS = \ -I$(top_srcdir)/gst-libs \ -I$(top_builddir)/gst-libs \ + -fobjc-arc \ $(GST_OBJCFLAGS_WITH_VERSION) \ $(GST_BASE_CFLAGS) \ $(GST_PLUGINS_BASE_CFLAGS) diff --git a/sys/applemedia/avfassetsrc.h b/sys/applemedia/avfassetsrc.h index debbb5a24..84df64508 100644 --- a/sys/applemedia/avfassetsrc.h +++ b/sys/applemedia/avfassetsrc.h @@ -37,6 +37,8 @@ G_BEGIN_DECLS #define GST_TYPE_AVF_ASSET_SRC \ (gst_avf_asset_src_get_type()) +#define GST_AVF_ASSET_SRC_READER(obj) \ + ((__bridge GstAVFAssetReader *)(obj->reader)) #define GST_AVF_ASSET_SRC(obj) \ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVF_ASSET_SRC,GstAVFAssetSrc)) #define GST_AVF_ASSET_SRC_CLASS(klass) \ @@ -95,9 +97,9 @@ typedef enum - (void) start : (GError **) error; - (void) stop; - (void) seekTo: (guint64) start : (guint64) stop : (GError **) error; -- (bool) hasMediaType: (GstAVFAssetReaderMediaType) type; +- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type; - (GstCaps *) getCaps: (GstAVFAssetReaderMediaType) type; -- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index; +- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index; - (GstBuffer *) nextBuffer: (GstAVFAssetReaderMediaType) type : (GError **) error; @end @@ -110,7 +112,10 @@ struct _GstAVFAssetSrc gint selected_video_track; gint selected_audio_track; - GstAVFAssetReader *reader; + /* NOTE: ARC no longer allows Objective-C pointers in structs. 
*/ + /* Instead, use gpointer with explicit __bridge_* calls */ + gpointer reader; + GstAVFAssetSrcState state; GMutex lock; GstEvent *seek_event; diff --git a/sys/applemedia/avfassetsrc.m b/sys/applemedia/avfassetsrc.m index 7cb9855b7..f7de87e15 100644 --- a/sys/applemedia/avfassetsrc.m +++ b/sys/applemedia/avfassetsrc.m @@ -51,15 +51,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_avf_asset_src_debug); #define MEDIA_TYPE_TO_STR(x) \ (x == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO ? "audio" : "video") #define AVF_ASSET_READER_HAS_AUDIO(x) \ - ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]) + ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]) #define AVF_ASSET_READER_HAS_VIDEO(x) \ - ([self->reader hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]) -#define OBJC_CALLOUT_BEGIN() \ - NSAutoreleasePool *pool; \ - \ - pool = [[NSAutoreleasePool alloc] init] -#define OBJC_CALLOUT_END() \ - [pool release] + ([GST_AVF_ASSET_SRC_READER(self) hasMediaType:GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]) enum { @@ -242,7 +236,6 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition) gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)), gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition))); - OBJC_CALLOUT_BEGIN (); switch (transition) { case GST_STATE_CHANGE_NULL_TO_READY: { self->state = GST_AVF_ASSET_SRC_STATE_STOPPED; @@ -252,7 +245,7 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition) gst_avf_asset_src_stop_all (self); return GST_STATE_CHANGE_FAILURE; } - self->reader = [[GstAVFAssetReader alloc] initWithURI:self->uri:&error]; + self->reader = (__bridge_retained gpointer)([[GstAVFAssetReader alloc] initWithURI:self->uri:&error]); if (error) { GST_ELEMENT_ERROR (element, RESOURCE, FAILED, ("AVFAssetReader error"), ("%s", error->message)); @@ -282,12 +275,11 @@ gst_avf_asset_src_change_state (GstElement * element, GstStateChange transition) 
gst_avf_asset_src_stop (self); break; case GST_STATE_CHANGE_READY_TO_NULL: - [self->reader release]; + CFBridgingRelease(self->reader); break; default: break; } - OBJC_CALLOUT_END (); return ret; } @@ -323,18 +315,18 @@ gst_avf_asset_src_query (GstPad *pad, GstObject * parent, GstQuery *query) ret = TRUE; break; case GST_QUERY_DURATION: - gst_query_set_duration (query, GST_FORMAT_TIME, self->reader.duration); + gst_query_set_duration (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).duration); ret = TRUE; break; case GST_QUERY_POSITION: - gst_query_set_position (query, GST_FORMAT_TIME, self->reader.position); + gst_query_set_position (query, GST_FORMAT_TIME, GST_AVF_ASSET_SRC_READER(self).position); ret = TRUE; break; case GST_QUERY_SEEKING: { GstFormat fmt; gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL); if (fmt == GST_FORMAT_TIME) { - gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, self->reader.duration); + gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0, GST_AVF_ASSET_SRC_READER(self).duration); ret = TRUE; } break; @@ -362,7 +354,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event) gboolean res = TRUE; GError *error = NULL; - OBJC_CALLOUT_BEGIN (); self = GST_AVF_ASSET_SRC (gst_pad_get_parent_element (pad)); switch (GST_EVENT_TYPE (event)) { @@ -409,7 +400,7 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event) stop = GST_CLOCK_TIME_NONE; } gst_avf_asset_src_send_event (self, gst_event_new_flush_start ()); - [self->reader seekTo: start: stop: &error]; + [GST_AVF_ASSET_SRC_READER(self) seekTo: start: stop: &error]; gst_segment_init (&segment, GST_FORMAT_TIME); segment.rate = rate; @@ -439,7 +430,6 @@ gst_avf_asset_src_event (GstPad * pad, GstObject * parent, GstEvent * event) } gst_object_unref (self); - OBJC_CALLOUT_END (); return res; } @@ -501,15 +491,14 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad, GstFlowReturn ret, combined_ret; GError *error; - 
OBJC_CALLOUT_BEGIN (); GST_AVF_ASSET_SRC_LOCK (self); if (self->state != GST_AVF_ASSET_SRC_STATE_READING) { GST_AVF_ASSET_SRC_UNLOCK (self); - goto exit; + return; } - buf = [self->reader nextBuffer:type:&error]; + buf = [GST_AVF_ASSET_SRC_READER(self) nextBuffer:type:&error]; GST_AVF_ASSET_SRC_UNLOCK (self); if (buf == NULL) { @@ -520,13 +509,13 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad, gst_avf_asset_src_combine_flows (self, type, GST_FLOW_ERROR); gst_pad_pause_task (pad); - goto exit; + return; } gst_pad_push_event (pad, gst_event_new_eos ()); gst_avf_asset_src_combine_flows (self, type, GST_FLOW_EOS); gst_pad_pause_task (pad); - goto exit; + return; } ret = gst_pad_push (pad, buf); @@ -547,8 +536,6 @@ gst_avf_asset_src_read_data (GstAVFAssetSrc *self, GstPad *pad, gst_pad_pause_task (pad); } -exit: - OBJC_CALLOUT_END (); } static void @@ -571,9 +558,8 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self) GError *error = NULL; gboolean ret = TRUE; - OBJC_CALLOUT_BEGIN (); - [self->reader start: &error]; + [GST_AVF_ASSET_SRC_READER(self) start: &error]; if (error != NULL) { GST_ELEMENT_ERROR (self, RESOURCE, FAILED, ("AVFAssetReader could not start reading"), ("%s", error->message)); @@ -583,7 +569,6 @@ gst_avf_asset_src_start_reader (GstAVFAssetSrc * self) } exit: - OBJC_CALLOUT_END (); return ret; } @@ -592,7 +577,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event) { gboolean ret = TRUE; - OBJC_CALLOUT_BEGIN (); if (AVF_ASSET_READER_HAS_VIDEO (self)) { ret |= gst_pad_push_event (self->videopad, gst_event_ref (event)); @@ -602,7 +586,6 @@ gst_avf_asset_src_send_event (GstAVFAssetSrc *self, GstEvent *event) } gst_event_unref (event); - OBJC_CALLOUT_END (); return ret; } @@ -611,25 +594,24 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self) { GstSegment segment; - OBJC_CALLOUT_BEGIN (); if (self->state == GST_AVF_ASSET_SRC_STATE_STARTED) { - goto exit; + return; } GST_DEBUG_OBJECT (self, "Creating pads and starting 
reader"); gst_segment_init (&segment, GST_FORMAT_TIME); - segment.duration = self->reader.duration; + segment.duration = GST_AVF_ASSET_SRC_READER(self).duration; /* We call AVFAssetReader's startReading when the pads are linked * and no outputs can be added afterwards, so the tracks must be * selected before adding any of the new pads */ if (AVF_ASSET_READER_HAS_AUDIO (self)) { - [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO: + [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO: self->selected_audio_track]; } if (AVF_ASSET_READER_HAS_VIDEO (self)) { - [self->reader selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO: + [GST_AVF_ASSET_SRC_READER(self) selectTrack: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO: self->selected_video_track]; } @@ -643,9 +625,9 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self) gst_pad_set_active (self->audiopad, TRUE); gst_avf_asset_src_send_start_stream (self, self->audiopad); gst_pad_set_caps (self->audiopad, - [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]); + [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO]); gst_pad_push_event (self->audiopad, gst_event_new_caps ( - [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])); + [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO])); gst_pad_push_event (self->audiopad, gst_event_new_segment (&segment)); gst_element_add_pad (GST_ELEMENT (self), self->audiopad); } @@ -659,35 +641,31 @@ gst_avf_asset_src_start (GstAVFAssetSrc *self) gst_pad_set_active (self->videopad, TRUE); gst_avf_asset_src_send_start_stream (self, self->videopad); gst_pad_set_caps (self->videopad, - [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]); + [GST_AVF_ASSET_SRC_READER(self) getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO]); gst_pad_push_event (self->videopad, gst_event_new_caps ( - [self->reader getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])); + [GST_AVF_ASSET_SRC_READER(self) 
getCaps: GST_AVF_ASSET_READER_MEDIA_TYPE_VIDEO])); gst_pad_push_event (self->videopad, gst_event_new_segment (&segment)); gst_element_add_pad (GST_ELEMENT (self), self->videopad); } gst_element_no_more_pads (GST_ELEMENT (self)); self->state = GST_AVF_ASSET_SRC_STATE_STARTED; - -exit: - OBJC_CALLOUT_END (); } static void gst_avf_asset_src_stop (GstAVFAssetSrc *self) { gboolean has_audio, has_video; - OBJC_CALLOUT_BEGIN(); if (self->state == GST_AVF_ASSET_SRC_STATE_STOPPED) { - goto exit; + return; } GST_DEBUG ("Stopping tasks and removing pads"); has_audio = AVF_ASSET_READER_HAS_AUDIO (self); has_video = AVF_ASSET_READER_HAS_VIDEO (self); - [self->reader stop]; + [GST_AVF_ASSET_SRC_READER(self) stop]; if (has_audio) { gst_pad_stop_task (self->audiopad); @@ -699,9 +677,6 @@ gst_avf_asset_src_stop (GstAVFAssetSrc *self) } self->state = GST_AVF_ASSET_SRC_STATE_STOPPED; - -exit: - OBJC_CALLOUT_END (); } static gboolean @@ -811,7 +786,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro AVAsset *asset; gboolean ret = FALSE; - OBJC_CALLOUT_BEGIN (); str = [NSString stringWithUTF8String: uri]; url = [[NSURL alloc] initWithString: str]; asset = [AVAsset assetWithURL: url]; @@ -824,7 +798,6 @@ gst_avf_asset_src_uri_set_uri (GstURIHandler * handler, const gchar * uri, GErro g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI, "Invalid URI '%s' for avfassetsrc", uri); } - OBJC_CALLOUT_END (); return ret; } @@ -872,11 +845,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) - (void) releaseReader { - [video_track release]; - [audio_track release]; - [video_tracks release]; - [audio_tracks release]; - [reader release]; + video_track = nil; + audio_track = nil; + video_tracks = nil; + audio_tracks = nil; + reader = nil; } - (void) initReader: (GError **) error @@ -889,13 +862,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) [nserror.description UTF8String]); *error = g_error_new 
(GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_INIT, "%s", [nserror.description UTF8String]); - [asset release]; - [reader release]; + return; } - audio_tracks = [[asset tracksWithMediaType:AVMediaTypeAudio] retain]; - video_tracks = [[asset tracksWithMediaType:AVMediaTypeVideo] retain]; + audio_tracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + video_tracks = [asset tracksWithMediaType:AVMediaTypeVideo]; reader.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration); GST_INFO ("Found %lu video tracks and %lu audio tracks", (unsigned long)[video_tracks count], (unsigned long)[audio_tracks count]); @@ -911,12 +883,12 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) str = [NSString stringWithUTF8String: uri]; url = [[NSURL alloc] initWithString: str]; - asset = [[AVAsset assetWithURL: url] retain]; + asset = [AVAsset assetWithURL: url]; if (!asset.playable) { *error = g_error_new (GST_AVF_ASSET_SRC_ERROR, GST_AVF_ASSET_ERROR_NOT_PLAYABLE, "Media is not playable"); - [asset release]; + asset = nil; return nil; } @@ -940,11 +912,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) return self; } -- (bool) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index +- (BOOL) selectTrack: (GstAVFAssetReaderMediaType) type : (gint) index { NSArray *tracks; AVAssetTrack *track; - AVAssetReaderOutput **output; + AVAssetReaderOutput * __strong *output; NSDictionary *settings; NSString *mediaType; gint *selected_track; @@ -978,7 +950,6 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:settings]; - [*output retain]; [reader addOutput:*output]; return TRUE; } @@ -999,11 +970,11 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) - (void) stop { - [self->reader cancelReading]; + [reader cancelReading]; reading = FALSE; } -- (bool) hasMediaType: (GstAVFAssetReaderMediaType) 
type +- (BOOL) hasMediaType: (GstAVFAssetReaderMediaType) type { if (type == GST_AVF_ASSET_READER_MEDIA_TYPE_AUDIO) { return [audio_tracks count] != 0; @@ -1122,10 +1093,9 @@ gst_avf_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) return caps; } -- (oneway void) release +- (void) dealloc { - [asset release]; - + asset = nil; [self releaseReader]; if (audio_caps != NULL) { diff --git a/sys/applemedia/avfvideosrc.h b/sys/applemedia/avfvideosrc.h index 80682da64..ff2fec1fe 100644 --- a/sys/applemedia/avfvideosrc.h +++ b/sys/applemedia/avfvideosrc.h @@ -33,7 +33,7 @@ G_BEGIN_DECLS #define GST_AVF_VIDEO_SRC_CLASS(klass) \ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVF_VIDEO_SRC, GstAVFVideoSrcClass)) #define GST_AVF_VIDEO_SRC_IMPL(obj) \ - ((GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl) + ((__bridge GstAVFVideoSrcImpl *) GST_AVF_VIDEO_SRC_CAST (obj)->impl) #define GST_IS_AVF_VIDEO_SRC(obj) \ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVF_VIDEO_SRC)) #define GST_IS_AVF_VIDEO_SRC_CLASS(klass) \ @@ -46,6 +46,8 @@ struct _GstAVFVideoSrc { GstPushSrc push_src; + /* NOTE: ARC no longer allows Objective-C pointers in structs. 
*/ + /* Instead, use gpointer with explicit __bridge_* calls */ gpointer impl; }; diff --git a/sys/applemedia/avfvideosrc.m b/sys/applemedia/avfvideosrc.m index 49559533d..97f4715b0 100644 --- a/sys/applemedia/avfvideosrc.m +++ b/sys/applemedia/avfvideosrc.m @@ -201,12 +201,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - (void)finalize { - dispatch_release (mainQueue); mainQueue = NULL; - dispatch_release (workerQueue); workerQueue = NULL; - - [super finalize]; } - (BOOL)openDeviceInput @@ -231,7 +227,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer device = [devices objectAtIndex:deviceIndex]; } g_assert (device != nil); - [device retain]; GST_INFO ("Opening '%s'", [[device localizedName] UTF8String]); @@ -242,11 +237,9 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ("Failed to open device: %s", [[err localizedDescription] UTF8String]), (NULL)); - [device release]; device = nil; return NO; } - [input retain]; return YES; } @@ -280,7 +273,6 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer } screenInput.capturesMouseClicks = captureScreenMouseClicks; input = screenInput; - [input retain]; return YES; #endif } @@ -337,17 +329,13 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer [session removeInput:input]; [session removeOutput:output]; - [session release]; session = nil; - [input release]; input = nil; - [output release]; output = nil; if (!captureScreen) { - [device release]; device = nil; } @@ -457,7 +445,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer CMVideoDimensions dimensions; /* formatDescription can't be retrieved with valueForKey so use a selector here */ - formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)]; + formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)]; dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); for (NSObject *rate in [f valueForKey:@"videoSupportedFrameRateRanges"]) { 
int min_fps_n, min_fps_d, max_fps_n, max_fps_d; @@ -529,7 +517,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer CMFormatDescriptionRef formatDescription; CMVideoDimensions dimensions; - formatDescription = (CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)]; + formatDescription = (__bridge CMFormatDescriptionRef) [f performSelector:@selector(formatDescription)]; dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); if (dimensions.width == info->width && dimensions.height == info->height) { found_format = TRUE; @@ -805,9 +793,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer dispatch_sync (mainQueue, ^{ [session stopRunning]; }); dispatch_sync (workerQueue, ^{}); - [bufQueueLock release]; bufQueueLock = nil; - [bufQueue release]; bufQueue = nil; if (textureCache) @@ -902,7 +888,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer if ([bufQueue count] == BUFFER_QUEUE_SIZE) [bufQueue removeLastObject]; - [bufQueue insertObject:@{@"sbuf": (id)sampleBuffer, + [bufQueue insertObject:@{@"sbuf": (__bridge id)sampleBuffer, @"timestamp": @(timestamp), @"duration": @(duration)} atIndex:0]; @@ -925,7 +911,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer } NSDictionary *dic = (NSDictionary *) [bufQueue lastObject]; - sbuf = (CMSampleBufferRef) dic[@"sbuf"]; + sbuf = (__bridge CMSampleBufferRef) dic[@"sbuf"]; timestamp = (GstClockTime) [dic[@"timestamp"] longLongValue]; duration = (GstClockTime) [dic[@"duration"] longLongValue]; CFRetain (sbuf); @@ -1224,28 +1210,16 @@ gst_avf_video_src_class_init (GstAVFVideoSrcClass * klass) 0, "iOS AVFoundation video source"); } -#define OBJC_CALLOUT_BEGIN() \ - NSAutoreleasePool *pool; \ - \ - pool = [[NSAutoreleasePool alloc] init] -#define OBJC_CALLOUT_END() \ - [pool release] - - static void gst_avf_video_src_init (GstAVFVideoSrc * src) { - OBJC_CALLOUT_BEGIN (); - src->impl = [[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)]; - OBJC_CALLOUT_END (); + 
src->impl = (__bridge_retained gpointer)[[GstAVFVideoSrcImpl alloc] initWithSrc:GST_PUSH_SRC (src)]; } static void gst_avf_video_src_finalize (GObject * obj) { - OBJC_CALLOUT_BEGIN (); - [GST_AVF_VIDEO_SRC_IMPL (obj) release]; - OBJC_CALLOUT_END (); + CFBridgingRelease(GST_AVF_VIDEO_SRC_CAST(obj)->impl); G_OBJECT_CLASS (parent_class)->finalize (obj); } @@ -1320,9 +1294,7 @@ gst_avf_video_src_change_state (GstElement * element, GstStateChange transition) { GstStateChangeReturn ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (element) changeState: transition]; - OBJC_CALLOUT_END (); return ret; } @@ -1332,9 +1304,7 @@ gst_avf_video_src_get_caps (GstBaseSrc * basesrc, GstCaps * filter) { GstCaps *ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) getCaps]; - OBJC_CALLOUT_END (); return ret; } @@ -1344,9 +1314,7 @@ gst_avf_video_src_set_caps (GstBaseSrc * basesrc, GstCaps * caps) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) setCaps:caps]; - OBJC_CALLOUT_END (); return ret; } @@ -1356,9 +1324,7 @@ gst_avf_video_src_start (GstBaseSrc * basesrc) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) start]; - OBJC_CALLOUT_END (); return ret; } @@ -1368,9 +1334,7 @@ gst_avf_video_src_stop (GstBaseSrc * basesrc) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) stop]; - OBJC_CALLOUT_END (); return ret; } @@ -1380,9 +1344,7 @@ gst_avf_video_src_query (GstBaseSrc * basesrc, GstQuery * query) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) query:query]; - OBJC_CALLOUT_END (); return ret; } @@ -1392,9 +1354,7 @@ gst_avf_video_src_unlock (GstBaseSrc * basesrc) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (basesrc) unlock]; - OBJC_CALLOUT_END (); return ret; } @@ -1404,9 +1364,7 @@ gst_avf_video_src_unlock_stop (GstBaseSrc * basesrc) { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = 
[GST_AVF_VIDEO_SRC_IMPL (basesrc) unlockStop]; - OBJC_CALLOUT_END (); return ret; } @@ -1416,9 +1374,7 @@ gst_avf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buf) { GstFlowReturn ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (pushsrc) create: buf]; - OBJC_CALLOUT_END (); return ret; } @@ -1429,9 +1385,7 @@ gst_avf_video_src_fixate (GstBaseSrc * bsrc, GstCaps * caps) { GstCaps *ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) fixate:caps]; - OBJC_CALLOUT_END (); return ret; } @@ -1442,9 +1396,7 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc, { gboolean ret; - OBJC_CALLOUT_BEGIN (); ret = [GST_AVF_VIDEO_SRC_IMPL (bsrc) decideAllocation:query]; - OBJC_CALLOUT_END (); return ret; } @@ -1452,7 +1404,5 @@ gst_avf_video_src_decide_allocation (GstBaseSrc * bsrc, static void gst_avf_video_src_set_context (GstElement * element, GstContext * context) { - OBJC_CALLOUT_BEGIN (); [GST_AVF_VIDEO_SRC_IMPL (element) setContext:context]; - OBJC_CALLOUT_END (); } diff --git a/sys/applemedia/avsamplevideosink.h b/sys/applemedia/avsamplevideosink.h index 3b9ed478d..6e7d5959d 100644 --- a/sys/applemedia/avsamplevideosink.h +++ b/sys/applemedia/avsamplevideosink.h @@ -42,6 +42,8 @@ G_BEGIN_DECLS (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AV_SAMPLE_VIDEO_SINK)) #define GST_IS_AV_SAMPLE_VIDEO_SINK_CLASS(klass) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AV_SAMPLE_VIDEO_SINK)) +#define GST_AV_SAMPLE_VIDEO_SINK_LAYER(obj) \ + ((__bridge AVSampleBufferDisplayLayer *)(obj->layer)) typedef struct _GstAVSampleVideoSink GstAVSampleVideoSink; typedef struct _GstAVSampleVideoSinkClass GstAVSampleVideoSinkClass; @@ -50,7 +52,9 @@ struct _GstAVSampleVideoSink { GstVideoSink video_sink; - AVSampleBufferDisplayLayer *layer; + /* NOTE: ARC no longer allows Objective-C pointers in structs. 
*/ + /* Instead, use gpointer with explicit __bridge_* calls */ + gpointer layer; GstVideoInfo info; diff --git a/sys/applemedia/avsamplevideosink.m b/sys/applemedia/avsamplevideosink.m index ce17e98f3..8e2d79ba1 100644 --- a/sys/applemedia/avsamplevideosink.m +++ b/sys/applemedia/avsamplevideosink.m @@ -156,11 +156,11 @@ static void gst_av_sample_video_sink_finalize (GObject * object) { GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (object); - __block AVSampleBufferDisplayLayer *layer = av_sink->layer; + __block gpointer layer = av_sink->layer; if (layer) { dispatch_async (dispatch_get_main_queue (), ^{ - [layer release]; + CFBridgingRelease(layer); }); } @@ -198,19 +198,21 @@ gst_av_sample_video_sink_start (GstBaseSink * bsink) GstAVSampleVideoSink *av_sink = GST_AV_SAMPLE_VIDEO_SINK (bsink); if ([NSThread isMainThread]) { - av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init]; + AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init]; + av_sink->layer = (__bridge_retained gpointer)layer; if (av_sink->keep_aspect_ratio) - av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; + layer.videoGravity = AVLayerVideoGravityResizeAspect; else - av_sink->layer.videoGravity = AVLayerVideoGravityResize; + layer.videoGravity = AVLayerVideoGravityResize; g_object_notify (G_OBJECT (av_sink), "layer"); } else { dispatch_sync (dispatch_get_main_queue (), ^{ - av_sink->layer = [[AVSampleBufferDisplayLayer alloc] init]; + AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init]; + av_sink->layer = (__bridge_retained gpointer)layer; if (av_sink->keep_aspect_ratio) - av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; + layer.videoGravity = AVLayerVideoGravityResizeAspect; else - av_sink->layer.videoGravity = AVLayerVideoGravityResize; + layer.videoGravity = AVLayerVideoGravityResize; g_object_notify (G_OBJECT (av_sink), "layer"); }); } @@ -224,7 +226,7 @@ _stop_requesting_data (GstAVSampleVideoSink * 
av_sink) { if (av_sink->layer) { if (av_sink->layer_requesting_data) - [av_sink->layer stopRequestingMediaData]; + [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) stopRequestingMediaData]; av_sink->layer_requesting_data = FALSE; } } @@ -243,7 +245,7 @@ gst_av_sample_video_sink_stop (GstBaseSink * bsink) g_mutex_lock (&av_sink->render_lock); _stop_requesting_data (av_sink); g_mutex_unlock (&av_sink->render_lock); - [av_sink->layer flushAndRemoveImage]; + [GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink) flushAndRemoveImage]; } return TRUE; @@ -661,11 +663,12 @@ _enqueue_sample (GstAVSampleVideoSink * av_sink, GstBuffer *buf) kCFBooleanTrue); } + AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink); if (av_sink->keep_aspect_ratio) - av_sink->layer.videoGravity = AVLayerVideoGravityResizeAspect; + layer.videoGravity = AVLayerVideoGravityResizeAspect; else - av_sink->layer.videoGravity = AVLayerVideoGravityResize; - [av_sink->layer enqueueSampleBuffer:sample_buf]; + layer.videoGravity = AVLayerVideoGravityResize; + [layer enqueueSampleBuffer:sample_buf]; CFRelease (pbuf); CFRelease (sample_buf); @@ -678,13 +681,14 @@ _request_data (GstAVSampleVideoSink * av_sink) { av_sink->layer_requesting_data = TRUE; - [av_sink->layer requestMediaDataWhenReadyOnQueue: + AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink); + [layer requestMediaDataWhenReadyOnQueue: dispatch_get_global_queue (DISPATCH_QUEUE_PRIORITY_DEFAULT, 0) usingBlock:^{ while (TRUE) { /* don't needlessly fill up avsamplebufferdisplaylayer's queue. 
* This also allows us to skip displaying late frames */ - if (!av_sink->layer.readyForMoreMediaData) + if (!layer.readyForMoreMediaData) break; g_mutex_lock (&av_sink->render_lock); @@ -752,9 +756,10 @@ gst_av_sample_video_sink_show_frame (GstVideoSink * vsink, GstBuffer * buf) MAC_OS_X_VERSION_MAX_ALLOWED >= 1010 && \ defined(MAC_OS_X_VERSION_MIN_REQUIRED) && \ MAC_OS_X_VERSION_MIN_REQUIRED <= MAC_OS_X_VERSION_10_4 - if ([av_sink->layer status] == AVQueuedSampleBufferRenderingStatusFailed) { + AVSampleBufferDisplayLayer *layer = GST_AV_SAMPLE_VIDEO_SINK_LAYER(av_sink); + if ([layer status] == AVQueuedSampleBufferRenderingStatusFailed) { GST_ERROR_OBJECT (av_sink, "failed to enqueue buffer on layer, %s", - [[[av_sink->layer error] description] UTF8String]); + [[[layer error] description] UTF8String]); return GST_FLOW_ERROR; } #endif diff --git a/sys/applemedia/iosassetsrc.h b/sys/applemedia/iosassetsrc.h index 550b37a05..f215ab729 100644 --- a/sys/applemedia/iosassetsrc.h +++ b/sys/applemedia/iosassetsrc.h @@ -44,6 +44,12 @@ G_BEGIN_DECLS #define GST_IS_IOS_ASSET_SRC_CLASS(klass) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IOS_ASSET_SRC)) #define GST_IOS_ASSET_SRC_CAST(obj) ((GstIOSAssetSrc*) obj) +#define GST_IOS_ASSET_SRC_ASSET(obj) \ + (__bridge ALAssetRepresentation *)(obj->asset) +#define GST_IOS_ASSET_SRC_LIBRARY(obj) \ + (__bridge GstAssetsLibrary *)(obj->library) +#define GST_IOS_ASSET_SRC_URL(obj) \ + (__bridge NSURL *)(obj->url) typedef struct _GstIOSAssetSrc GstIOSAssetSrc; typedef struct _GstIOSAssetSrcClass GstIOSAssetSrcClass; @@ -68,9 +74,12 @@ struct _GstIOSAssetSrc { /*< private >*/ gchar * uri; /* asset uri */ - NSURL * url; /* asset url */ - ALAssetRepresentation * asset; /* asset representation */ - GstAssetsLibrary * library; /* assets library */ + + /* NOTE: ARC no longer allows Objective-C pointers in structs. 
*/ + /* Instead, use gpointer with explicit __bridge_* calls */ + gpointer url; /* asset url */ + gpointer asset; /* asset representation */ + gpointer library; /* assets library */ }; struct _GstIOSAssetSrcClass { diff --git a/sys/applemedia/iosassetsrc.m b/sys/applemedia/iosassetsrc.m index e237c2ec5..6a8b93705 100644 --- a/sys/applemedia/iosassetsrc.m +++ b/sys/applemedia/iosassetsrc.m @@ -52,12 +52,6 @@ GST_DEBUG_CATEGORY_STATIC (gst_ios_asset_src_debug); #define DEFAULT_BLOCKSIZE 4*1024 -#define OBJC_CALLOUT_BEGIN() \ - NSAutoreleasePool *pool; \ - \ - pool = [[NSAutoreleasePool alloc] init] -#define OBJC_CALLOUT_END() \ - [pool release] enum { @@ -142,25 +136,22 @@ gst_ios_asset_src_class_init (GstIOSAssetSrcClass * klass) static void gst_ios_asset_src_init (GstIOSAssetSrc * src) { - OBJC_CALLOUT_BEGIN (); src->uri = NULL; src->asset = NULL; - src->library = [[[GstAssetsLibrary alloc] init] retain]; + src->library = (__bridge_retained gpointer)[[GstAssetsLibrary alloc] init]; gst_base_src_set_blocksize (GST_BASE_SRC (src), DEFAULT_BLOCKSIZE); - OBJC_CALLOUT_END (); } static void gst_ios_asset_src_free_resources (GstIOSAssetSrc *src) { - OBJC_CALLOUT_BEGIN (); if (src->asset != NULL) { - [src->asset release]; + CFBridgingRelease(src->asset); src->asset = NULL; } if (src->url != NULL) { - [src->url release]; + CFBridgingRelease(src->url); src->url = NULL; } @@ -168,7 +159,6 @@ gst_ios_asset_src_free_resources (GstIOSAssetSrc *src) g_free (src->uri); src->uri = NULL; } - OBJC_CALLOUT_END (); } static void @@ -176,12 +166,10 @@ gst_ios_asset_src_finalize (GObject * object) { GstIOSAssetSrc *src; - OBJC_CALLOUT_BEGIN (); src = GST_IOS_ASSET_SRC (object); gst_ios_asset_src_free_resources (src); - [src->library release]; + CFBridgingRelease(src->library); - OBJC_CALLOUT_END (); G_OBJECT_CLASS (gst_ios_asset_src_parent_class)->finalize (object); } @@ -192,7 +180,6 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err NSString *nsuristr; 
NSURL *url; - OBJC_CALLOUT_BEGIN (); /* the element must be stopped in order to do this */ GST_OBJECT_LOCK (src); state = GST_STATE (src); @@ -213,11 +200,10 @@ gst_ios_asset_src_set_uri (GstIOSAssetSrc * src, const gchar * uri, GError **err } GST_INFO_OBJECT (src, "URI : %s", src->uri); - src->url = url; + src->url = (__bridge_retained gpointer)url; src->uri = g_strdup (uri); g_object_notify (G_OBJECT (src), "uri"); - OBJC_CALLOUT_END (); return TRUE; /* ERROR */ @@ -229,7 +215,6 @@ wrong_state: "Changing the 'uri' property on iosassetsrc when an asset is " "open is not supported."); GST_OBJECT_UNLOCK (src); - OBJC_CALLOUT_END (); return FALSE; } } @@ -285,7 +270,6 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length, GstFlowReturn ret; GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); - OBJC_CALLOUT_BEGIN (); buf = gst_buffer_new_and_alloc (length); if (G_UNLIKELY (buf == NULL && length > 0)) { GST_ERROR_OBJECT (src, "Failed to allocate %u bytes", length); @@ -296,10 +280,10 @@ gst_ios_asset_src_create (GstBaseSrc * basesrc, guint64 offset, guint length, gst_buffer_map (buf, &info, GST_MAP_READWRITE); /* No need to read anything if length is 0 */ - bytes_read = [src->asset getBytes: info.data - fromOffset:offset - length:length - error:&err]; + bytes_read = [GST_IOS_ASSET_SRC_ASSET(src) getBytes: info.data + fromOffset:offset + length:length + error:&err]; if (G_UNLIKELY (err != NULL)) { goto could_not_read; } @@ -333,7 +317,6 @@ could_not_read: } exit: { - OBJC_CALLOUT_END (); return ret; } @@ -374,9 +357,7 @@ gst_ios_asset_src_get_size (GstBaseSrc * basesrc, guint64 * size) src = GST_IOS_ASSET_SRC (basesrc); - OBJC_CALLOUT_BEGIN (); - *size = (guint64) [src->asset size]; - OBJC_CALLOUT_END (); + *size = (guint64) [GST_IOS_ASSET_SRC_ASSET(src) size]; return TRUE; } @@ -386,8 +367,7 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc) GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); gboolean ret = TRUE; - OBJC_CALLOUT_BEGIN (); - 
src->asset = [[src->library assetForURLSync: src->url] retain]; + src->asset = (__bridge_retained gpointer)[GST_IOS_ASSET_SRC_LIBRARY(src) assetForURLSync: GST_IOS_ASSET_SRC_URL(src)]; if (src->asset == NULL) { GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, @@ -396,7 +376,6 @@ gst_ios_asset_src_start (GstBaseSrc * basesrc) ret = FALSE; }; - OBJC_CALLOUT_END (); return ret; } @@ -406,9 +385,7 @@ gst_ios_asset_src_stop (GstBaseSrc * basesrc) { GstIOSAssetSrc *src = GST_IOS_ASSET_SRC (basesrc); - OBJC_CALLOUT_BEGIN (); - [src->asset release]; - OBJC_CALLOUT_END (); + CFBridgingRelease(src->asset); return TRUE; } @@ -480,24 +457,23 @@ gst_ios_asset_src_uri_handler_init (gpointer g_iface, gpointer iface_data) dispatch_async(queue, ^{ [self assetForURL:uri resultBlock: - ^(ALAsset *myasset) - { - self.asset = myasset; - self.result = [myasset defaultRepresentation]; - - dispatch_semaphore_signal(sema); - } - failureBlock: - ^(NSError *myerror) - { - self.result = nil; - dispatch_semaphore_signal(sema); - } + ^(ALAsset *myasset) + { + self.asset = myasset; + self.result = [myasset defaultRepresentation]; + + dispatch_semaphore_signal(sema); + } + failureBlock: + ^(NSError *myerror) + { + self.result = nil; + dispatch_semaphore_signal(sema); + } ]; }); dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER); - dispatch_release(sema); return self.result; } diff --git a/sys/applemedia/plugin.m b/sys/applemedia/plugin.m index fb7ee928a..05c279568 100644 --- a/sys/applemedia/plugin.m +++ b/sys/applemedia/plugin.m @@ -56,7 +56,6 @@ enable_mt_mode (void) { NSThread * th = [[NSThread alloc] init]; [th start]; - [th release]; g_assert ([NSThread isMultiThreaded]); } #endif diff --git a/sys/applemedia/videotexturecache.m b/sys/applemedia/videotexturecache.m index b2add1ce6..2aa87dd35 100644 --- a/sys/applemedia/videotexturecache.m +++ b/sys/applemedia/videotexturecache.m @@ -45,10 +45,10 @@ typedef struct _ContextThreadData typedef struct _TextureWrapper { #if HAVE_IOS - 
CVOpenGLESTextureCacheRef *cache; + CVOpenGLESTextureCacheRef cache; CVOpenGLESTextureRef texture; #else - CVOpenGLTextureCacheRef *cache; + CVOpenGLTextureCacheRef cache; CVOpenGLTextureRef texture; #endif @@ -69,7 +69,7 @@ gst_video_texture_cache_new (GstGLContext * ctx) CFDictionaryCreateMutable (NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); CVOpenGLESTextureCacheCreate (kCFAllocatorDefault, (CFDictionaryRef) cache_attrs, - (CVEAGLContext) gst_gl_context_get_gl_context (ctx), NULL, &cache->cache); + (__bridge CVEAGLContext) (gpointer)gst_gl_context_get_gl_context (ctx), NULL, &cache->cache); #else gst_ios_surface_memory_init (); #if 0 @@ -199,7 +199,8 @@ _do_create_memory (GstGLContext * context, ContextThreadData * data) success: { TextureWrapper *texture_data = g_new(TextureWrapper, 1); - texture_data->cache = CFRetain(cache->cache); + CFRetain(cache->cache); + texture_data->cache = cache->cache; texture_data->texture = texture; gl_target = gst_gl_texture_target_from_gl (CVOpenGLESTextureGetTarget (texture)); memory = gst_apple_core_video_memory_new_wrapped (gpixbuf, plane, size); |