author     Olivier Crête <olivier.crete@collabora.com>  2012-10-30 19:30:38 +0100
committer  Olivier Crête <olivier.crete@collabora.com>  2012-10-30 19:32:59 +0100
commit     9a328a8f71ad253d18e9c61e046fa31e76d63b57 (patch)
tree       ed82c0c4c1ffc102ea46b685d75830e1ceafd8be /sys/applemedia
parent     701fd9b33604b5db9c4e3666a75447f01d3a4459 (diff)
download   gstreamer-plugins-bad-9a328a8f71ad253d18e9c61e046fa31e76d63b57.tar.gz
avfvideosrc: Negotiate format, also support more resolutions
This should enable support for old and new iPhones with the same code.
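
As a rough illustration of what the negotiation change enables, a downstream application can now pick one of the advertised formats and resolutions through ordinary caps filtering. The following is only a minimal sketch, assuming the GStreamer 0.10 API that this plugin targets; the element name and caps fields come from the patch below, while the pipeline layout, the fakesink and the chosen NV12/1280x720 values are illustrative:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline, *src, *sink;
  GstCaps *caps;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("capture");
  src = gst_element_factory_make ("avfvideosrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);

  /* Ask for one of the newly negotiable combinations; NV12 at 1280x720 is
   * just an example, any format/size pair from the pad template would do. */
  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'),
      "width", G_TYPE_INT, 1280,
      "height", G_TYPE_INT, 720,
      NULL);
  gst_element_link_filtered (src, sink, caps);
  gst_caps_unref (caps);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* ... run a main loop here, then shut down ... */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}

Negotiation then falls through to the new getCaps/setCaps code below, which picks the matching AVCaptureSession preset and CoreVideo pixel format.
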
Diffstat (limited to 'sys/applemedia')
-rw-r--r--   sys/applemedia/avfvideosrc.m   119
1 file changed, 101 insertions, 18 deletions
diff --git a/sys/applemedia/avfvideosrc.m b/sys/applemedia/avfvideosrc.m
index 717b5c045..1464b0d9a 100644
--- a/sys/applemedia/avfvideosrc.m
+++ b/sys/applemedia/avfvideosrc.m
@@ -27,8 +27,6 @@
#define DEFAULT_DEVICE_INDEX -1
#define DEFAULT_DO_STATS FALSE
-#define DEVICE_VIDEO_FORMAT GST_VIDEO_FORMAT_YUY2
-#define DEVICE_YUV_FOURCC "YUY2"
#define DEVICE_FPS_N 25
#define DEVICE_FPS_D 1
@@ -37,11 +35,36 @@
GST_DEBUG_CATEGORY (gst_avf_video_src_debug);
#define GST_CAT_DEFAULT gst_avf_video_src_debug
+#define VIDEO_CAPS_YUV(width, height) "video/x-raw-yuv, " \
+ "format = (fourcc) { NV12, UYVY, YUY2 }, " \
+ "framerate = " GST_VIDEO_FPS_RANGE ", " \
+ "width = (int) " G_STRINGIFY (width) ", height = (int) " G_STRINGIFY (height)
+
+#define VIDEO_CAPS_BGRA(width, height) "video/x-raw-rgb, " \
+ "bpp = (int) 32, " \
+ "depth = (int) 32, " \
+ "endianness = (int) BIG_ENDIAN, " \
+ "red_mask = (int) " GST_VIDEO_BYTE3_MASK_32 ", " \
+ "green_mask = (int) " GST_VIDEO_BYTE2_MASK_32 ", " \
+ "blue_mask = (int) " GST_VIDEO_BYTE1_MASK_32 ", " \
+ "alpha_mask = (int) " GST_VIDEO_BYTE4_MASK_32 ", " \
+ "width = (int) " G_STRINGIFY (width) ", height = (int) " G_STRINGIFY (height)
+
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (
- GST_VIDEO_CAPS_YUV (DEVICE_YUV_FOURCC))
+ GST_STATIC_CAPS (VIDEO_CAPS_YUV (192, 144) ";"
+ VIDEO_CAPS_YUV (480, 360) ";"
+ VIDEO_CAPS_YUV (352, 288) ";"
+ VIDEO_CAPS_YUV (640, 480) ";"
+ VIDEO_CAPS_YUV (1280, 720) ";"
+ VIDEO_CAPS_YUV (1920, 1080) ";"
+ VIDEO_CAPS_BGRA (192, 144) ";"
+ VIDEO_CAPS_BGRA (480, 360) ";"
+ VIDEO_CAPS_BGRA (352, 288) ";"
+ VIDEO_CAPS_BGRA (640, 480) ";"
+ VIDEO_CAPS_BGRA (1280, 720) ";"
+ VIDEO_CAPS_BGRA (1920, 1080))
);
typedef enum _QueueState {
@@ -71,6 +94,7 @@ static GstPushSrcClass * parent_class;
NSMutableArray *bufQueue;
BOOL stopRequest;
+ GstVideoFormat format;
gint width, height;
GstClockTime duration;
guint64 offset;
@@ -249,43 +273,74 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
bufferFactory = nil;
}
-#define GST_AVF_CAPS_NEW(w, h) \
- (gst_video_format_new_caps (DEVICE_VIDEO_FORMAT, w, h, \
+#define GST_AVF_CAPS_NEW(format, w, h) \
+ (gst_video_format_new_caps (format, w, h, \
DEVICE_FPS_N, DEVICE_FPS_D, 1, 1))
- (GstCaps *)getCaps
{
GstCaps *result;
+ NSArray *formats;
if (session == nil)
return NULL; /* BaseSrc will return template caps */
+
+ result = gst_caps_new_empty ();
- result = GST_AVF_CAPS_NEW (192, 144);
- if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
- gst_caps_merge (result, GST_AVF_CAPS_NEW (480, 360));
- if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
- gst_caps_merge (result, GST_AVF_CAPS_NEW (640, 480));
- if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
- gst_caps_merge (result, GST_AVF_CAPS_NEW (1280, 720));
+ formats = output.availableVideoCVPixelFormatTypes;
+ for (id object in formats) {
+ NSNumber *nsformat = object;
+ GstVideoFormat gstformat = GST_VIDEO_FORMAT_UNKNOWN;
+
+ switch ([nsformat integerValue]) {
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: /* 420v */
+ gstformat = GST_VIDEO_FORMAT_NV12;
+ break;
+ case kCVPixelFormatType_422YpCbCr8: /* 2vuy */
+ gstformat = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case kCVPixelFormatType_32BGRA: /* BGRA */
+ gstformat = GST_VIDEO_FORMAT_BGRA;
+ break;
+ case kCVPixelFormatType_422YpCbCr8_yuvs: /* yuvs */
+ gstformat = GST_VIDEO_FORMAT_YUY2;
+ break;
+ default:
+ continue;
+ }
+
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 192, 144));
+ if ([session canSetSessionPreset:AVCaptureSessionPreset352x288])
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 352, 288));
+ if ([session canSetSessionPreset:AVCaptureSessionPresetMedium])
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 480, 360));
+ if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 640, 480));
+ if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720])
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 1280, 720));
+ if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
+ gst_caps_append (result, GST_AVF_CAPS_NEW (gstformat, 1920, 1080));
+ }
return result;
}
- (BOOL)setCaps:(GstCaps *)caps
{
- GstStructure *s;
-
- s = gst_caps_get_structure (caps, 0);
- gst_structure_get_int (s, "width", &width);
- gst_structure_get_int (s, "height", &height);
+ gst_video_format_parse_caps (caps, &format, &width, &height);
dispatch_async (mainQueue, ^{
+ int newformat;
+
g_assert (![session isRunning]);
switch (width) {
case 192:
session.sessionPreset = AVCaptureSessionPresetLow;
break;
+ case 352:
+ session.sessionPreset = AVCaptureSessionPreset352x288;
+ break;
case 480:
session.sessionPreset = AVCaptureSessionPresetMedium;
break;
@@ -295,10 +350,38 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
case 1280:
session.sessionPreset = AVCaptureSessionPreset1280x720;
break;
+ case 1920:
+ session.sessionPreset = AVCaptureSessionPreset1920x1080;
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+
+ switch (format) {
+ case GST_VIDEO_FORMAT_NV12:
+ newformat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ newformat = kCVPixelFormatType_422YpCbCr8;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ newformat = kCVPixelFormatType_422YpCbCr8_yuvs;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ newformat = kCVPixelFormatType_32BGRA;
+ break;
default:
g_assert_not_reached ();
}
+ GST_DEBUG_OBJECT (element,
+ "Width: %d Height: %d Format: %" GST_FOURCC_FORMAT,
+ width, height,
+ GST_FOURCC_ARGS (gst_video_format_to_fourcc (format)));
+
+
+ output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:newformat] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
+
[session startRunning];
});
[self waitForMainQueueToDrain];
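
Since the CoreVideo/GStreamer pixel-format correspondence now appears twice (once when enumerating availableVideoCVPixelFormatTypes in getCaps, once in the setCaps switch above), the same information could also be kept in a single lookup table. This is a hypothetical alternative sketch only, not part of the patch; the helper names are invented, and the rows are the four formats the patch handles:

#include <CoreVideo/CoreVideo.h>
#include <gst/video/video.h>

/* One row per format pair handled by the patch. */
static const struct {
  OSType cv_format;
  GstVideoFormat gst_format;
} format_map[] = {
  { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, GST_VIDEO_FORMAT_NV12 },
  { kCVPixelFormatType_422YpCbCr8,                   GST_VIDEO_FORMAT_UYVY },
  { kCVPixelFormatType_422YpCbCr8_yuvs,              GST_VIDEO_FORMAT_YUY2 },
  { kCVPixelFormatType_32BGRA,                       GST_VIDEO_FORMAT_BGRA },
};

/* CoreVideo -> GStreamer, as needed when building caps in getCaps. */
static GstVideoFormat
gst_format_from_cv (OSType cv_format)
{
  guint i;

  for (i = 0; i < G_N_ELEMENTS (format_map); i++)
    if (format_map[i].cv_format == cv_format)
      return format_map[i].gst_format;
  return GST_VIDEO_FORMAT_UNKNOWN;
}

/* GStreamer -> CoreVideo, as needed when applying caps in setCaps.
 * Returns 0 for formats the element does not handle. */
static OSType
cv_format_from_gst (GstVideoFormat gst_format)
{
  guint i;

  for (i = 0; i < G_N_ELEMENTS (format_map); i++)
    if (format_map[i].gst_format == gst_format)
      return format_map[i].cv_format;
  return 0;
}
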