diff options
Diffstat (limited to 'SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com')
23 files changed, 1870 insertions, 0 deletions
diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/activity/DecodeActivity.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/activity/DecodeActivity.java new file mode 100755 index 000000000..bc02c7c10 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/activity/DecodeActivity.java @@ -0,0 +1,196 @@ +package com.batutin.android.androidvideostreaming.activity; + +import android.app.Activity; +import android.graphics.Bitmap; +import android.media.CamcorderProfile; +import android.os.Bundle; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.View; +import android.widget.RelativeLayout; + +import com.batutin.android.androidvideostreaming.R; +import com.batutin.android.androidvideostreaming.colorspace.ColorSpaceUtils; +import com.batutin.android.androidvideostreaming.media.CamcorderProfileUtils; +import com.batutin.android.androidvideostreaming.media.VideoAvcCoder; +import com.batutin.android.androidvideostreaming.media.VideoAvcCoderDataStreamListener; +import com.batutin.android.androidvideostreaming.utils.ALog; +import com.batutin.android.androidvideostreaming.videopreview.BitmapGeneratorThread; +import com.batutin.android.androidvideostreaming.videopreview.FrameDataSource; +import com.batutin.android.androidvideostreaming.videopreview.PlayerThread; +import com.batutin.android.androidvideostreaming.videopreview.PlayerThreadState; +import com.batutin.android.androidvideostreaming.videopreview.VideoPreviewThread; + +import java.io.IOException; +import java.io.PipedInputStream; +import java.io.PipedOutputStream; +import java.nio.ByteBuffer; + +public class DecodeActivity extends Activity implements SurfaceHolder.Callback, VideoAvcCoderDataStreamListener, PlayerThreadState, FrameDataSource { + + + private PlayerThread mPlayer; + private 
CamcorderProfile mCamcorderProfile; + private BitmapGeneratorThread mBitmapGenerator; + + public PlayerThread getPlayer() { + return mPlayer; + } + + public CamcorderProfile getCamcorderProfile() { + return mCamcorderProfile; + } + + public BitmapGeneratorThread getBitmapGenerator() { + return mBitmapGenerator; + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + configureLogger(); + mCamcorderProfile = CamcorderProfileUtils.getFirstCameraCamcorderProfile(CamcorderProfile.QUALITY_LOW); + setContentView(R.layout.decode_activity); + SurfaceView sv = (SurfaceView) findViewById(R.id.surfaceView); + sv.getHolder().addCallback(this); + } + + private void configureLogger() { + ALog.setTag("DecodeActivity"); + ALog.setLevel(ALog.Level.V); + } + + @Override + public void surfaceCreated(SurfaceHolder holder) { + } + + @Override + public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + configureVideoCoding(holder); + ALog.d("surfaceChanged" + mPlayer.toString()); + } + + private void configureVideoCoding(SurfaceHolder holder) { + PipedInputStream pipedReader = new PipedInputStream(); + PipedOutputStream pipedWriter = new PipedOutputStream(); + mBitmapGenerator = new BitmapGeneratorThread(pipedWriter, this, this); + mPlayer = new PlayerThread(this, holder.getSurface(), pipedReader); + try { + pipedWriter.connect(pipedReader); + } catch (IOException e) { + e.printStackTrace(); + } + try { + ALog.i(mBitmapGenerator.getState().name()); + if (mBitmapGenerator.getState().equals(Thread.State.NEW)) { + mBitmapGenerator.start(); + } + ALog.i(mPlayer.getState().name()); + if (mPlayer.getState().equals(Thread.State.NEW)) { + mPlayer.start(); + } + } catch (IllegalThreadStateException e) { + ALog.e(e.getMessage()); + } + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + ALog.d("surfaceDestroyed" + mPlayer.toString()); + } + + @Override + protected void onPause() { + 
super.onPause(); + if (mPlayer != null) { + endStream(null); + ALog.d("onPause" + mPlayer.toString()); + } + } + + public void endStream(View v) { + mBitmapGenerator.shouldStop(); + mPlayer.getVideoAvcCoder().shouldStop(); + + } + + public byte[] createTestByteArray() { + RelativeLayout view = (RelativeLayout) findViewById(R.id.testLayout); + view.buildDrawingCache(); + Bitmap bitmap = Bitmap.createBitmap(view.getDrawingCache(), 0, 0, mCamcorderProfile.videoFrameWidth, mCamcorderProfile.videoFrameHeight); + view.destroyDrawingCache(); + byte[] data = ColorSpaceUtils.getNV21(mCamcorderProfile.videoFrameWidth, mCamcorderProfile.videoFrameHeight, bitmap); + return data; + } + + + @Override + public void dataEncodingShouldStart(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void dataEncodingStarted(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void frameShouldBeEncoded(VideoAvcCoder videoAvcCoder, byte[] frame) { + + } + + @Override + public void settingsDataReceived(VideoAvcCoder videoAvcCoder, ByteBuffer settingsData) { + + } + + @Override + public void frameWasEncoded(VideoAvcCoder videoAvcCoder, ByteBuffer encodedFrame) { + + } + + @Override + public void dataEncodingShouldStop(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void dataEncodingStopped(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void dataDecodingShouldStart(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void dataDecodingStarted(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void frameShouldBeDecoded(VideoAvcCoder videoAvcCoder, ByteBuffer frame) { + + } + + @Override + public void dataDecodingShouldStop(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void dataDecodingStopped(VideoAvcCoder videoAvcCoder) { + + } + + @Override + public void threadShouldStop(VideoPreviewThread thread) { + mPlayer.getVideoAvcCoder().shouldStop(); + } + + @Override + public byte[] createVideoFrame() { + return 
createTestByteArray(); + } +}
\ No newline at end of file diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/colorspace/ColorSpaceUtils.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/colorspace/ColorSpaceUtils.java new file mode 100644 index 000000000..3fb33cbd5 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/colorspace/ColorSpaceUtils.java @@ -0,0 +1,59 @@ +package com.batutin.android.androidvideostreaming.colorspace; + +import android.graphics.Bitmap; + +/** + * Created by Andrew Batutin on 7/31/13. + */ +public class ColorSpaceUtils { + + public static byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) { + + int[] argb = new int[inputWidth * inputHeight]; + + scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight); + + byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2]; + encodeYUV420SP(yuv, argb, inputWidth, inputHeight); + + scaled.recycle(); + + return yuv; + } + + public static void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) { + final int frameSize = width * height; + + int yIndex = 0; + int uvIndex = frameSize; + + int a, R, G, B, Y, U, V; + int index = 0; + for (int j = 0; j < height; j++) { + for (int i = 0; i < width; i++) { + + a = (argb[index] & 0xff000000) >> 24; // a is not used obviously + R = (argb[index] & 0xff0000) >> 16; + G = (argb[index] & 0xff00) >> 8; + B = (argb[index] & 0xff) >> 0; + + // well known RGB to YUV algorithm + Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16; + U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128; + V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128; + + // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2 + // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other + // pixel AND every other scanline. 
+ yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y)); + if (j % 2 == 0 && index % 2 == 0) { + yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V)); + yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U)); + } + + index++; + } + } + } + +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/AbstractMediaCoder.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/AbstractMediaCoder.java new file mode 100644 index 000000000..54b5df00f --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/AbstractMediaCoder.java @@ -0,0 +1,46 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.MediaCodec; +import android.media.MediaFormat; + +/** + * Created by Andrew Batutin on 8/12/13. + */ +public abstract class AbstractMediaCoder implements MediaCoderState { + public static final String MIME_TYPE = "video/avc"; + protected boolean isConfigured = false; + protected boolean isRunning = false; + protected MediaFormat mediaFormat; + protected MediaCodec codec; + + public boolean isRunning() { + return isRunning; + } + + public boolean isConfigured() { + return isConfigured; + } + + public MediaFormat getMediaFormat() { + return mediaFormat; + } + + @Override + public void start() throws IllegalStateException { + if (isRunning == false) { + codec.start(); + isRunning = true; + } + } + + @Override + public void stop() throws IllegalStateException { + if (isRunning == true) { + codec.stop(); + codec.release(); + isRunning = false; + } + } + + protected abstract MediaCodec createMediaCodec(); +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CamcorderProfileUtils.java 
b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CamcorderProfileUtils.java new file mode 100644 index 000000000..2e3294749 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CamcorderProfileUtils.java @@ -0,0 +1,49 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.hardware.Camera; +import android.media.CamcorderProfile; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.util.HashMap; +import java.util.Map; + +public class CamcorderProfileUtils { + + public static CamcorderProfile getFirstCameraCamcorderProfile(int quality) throws IllegalArgumentException { + ALog.d("start getting CamcorderProfile"); + int nCamera = Camera.getNumberOfCameras(); + CamcorderProfile profile = null; + for (int cameraId = 0; cameraId < nCamera; cameraId++) { + profile = getCamcorderProfile(cameraId, quality); + break; + } + if (profile == null) { + ALog.e("failed to get CamcorderProfile"); + throw new IllegalArgumentException("CamcorderProfile is null"); + } + ALog.d("CamcorderProfile " + profile.toString() + " found"); + return profile; + } + + public static Map<Integer, CamcorderProfile> getSupportedCamcorderProfileMapForCamera(int cameraId) { + ALog.d("start getting CamcorderProfile map"); + Map<Integer, CamcorderProfile> profileMap = new HashMap<Integer, CamcorderProfile>(); + for (int quality = CamcorderProfile.QUALITY_LOW; quality <= CamcorderProfile.QUALITY_QVGA; quality++) { + CamcorderProfile profile = getCamcorderProfile(cameraId, quality); + if (profile != null) { + profileMap.put(quality, profile); + } + } + ALog.d("CamcorderProfile " + profileMap.toString() + " found"); + return profileMap; + } + + private static CamcorderProfile getCamcorderProfile(int cameraId, int quality) throws IllegalArgumentException { + if (CamcorderProfile.hasProfile(cameraId, quality) == false) { + return 
null; + } + CamcorderProfile profile = CamcorderProfile.get(cameraId, quality); + return profile; + } +}
\ No newline at end of file diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CodecInfoUtils.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CodecInfoUtils.java new file mode 100644 index 000000000..435def48f --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/CodecInfoUtils.java @@ -0,0 +1,65 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.MediaCodecInfo; +import android.media.MediaCodecList; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.util.ArrayList; +import java.util.List; + +public class CodecInfoUtils { + + public static final String MIME_TYPE = "video/avc"; + + public static MediaCodecInfo selectFirstVideoAvcCodec() throws IllegalArgumentException { + return selectFirstCodec(MIME_TYPE); + } + + /** + * Returns the first codec capable of encoding the specified MIME type, or null if no + * match was found. 
+ */ + public static MediaCodecInfo selectFirstCodec(String mimeType) throws IllegalArgumentException { + ALog.d("start getting MediaCodecInfo"); + List<MediaCodecInfo> infoList = getSupportedMediaCodecInfoList(mimeType); + if (infoList == null || infoList.size() <= 0) { + ALog.e("failed getting MediaCodecInfo"); + throw new IllegalArgumentException("no available codecs"); + } + MediaCodecInfo codecI = infoList.get(0); + ALog.d("MediaCodecInfo " + codecI.toString() + " found"); + return codecI; + } + + public static List<MediaCodecInfo> getSupportedMediaCodecInfoList(String mimeType) throws IllegalArgumentException { + ALog.d("start getting MediaCodecInfo list"); + int numCodecs = MediaCodecList.getCodecCount(); + if (numCodecs <= 0) { + ALog.e("failed getting MediaCodecInfo list"); + throw new IllegalArgumentException("no available codecs"); + } + List<MediaCodecInfo> infoList = new ArrayList<MediaCodecInfo>(1); + for (int i = 0; i < numCodecs; i++) { + MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); + if (!codecInfo.isEncoder()) { + continue; + } + if (isCodecTypeMimeType(mimeType, codecInfo)) { + infoList.add(codecInfo); + } + } + ALog.d("MediaCodecInfo list " + infoList.toString() + " found"); + return infoList; + } + + private static boolean isCodecTypeMimeType(String mimeType, MediaCodecInfo codecInfo) { + String[] types = codecInfo.getSupportedTypes(); + for (int j = 0; j < types.length; j++) { + if (types[j].equalsIgnoreCase(mimeType)) { + return true; + } + } + return false; + } +}
\ No newline at end of file diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/ColorFormatUtils.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/ColorFormatUtils.java new file mode 100644 index 000000000..94d2adf67 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/ColorFormatUtils.java @@ -0,0 +1,132 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.MediaCodecInfo; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ColorFormatUtils { + + private static final String MIME_TYPE = "video/avc"; + public static final Map<Integer, String> colorFormatNamesMap; + + static { + Map<Integer, String> aMap = new HashMap<Integer, String>(); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatMonochrome, "COLOR_FormatMonochrome"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format8bitRGB332, "COLOR_Format8bitRGB332"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format12bitRGB444, "COLOR_Format12bitRGB444"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format16bitARGB4444, "COLOR_Format16bitARGB4444"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format16bitARGB1555, "COLOR_Format16bitARGB1555"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format16bitRGB565, "COLOR_Format16bitRGB565"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format16bitBGR565, "COLOR_Format16bitBGR565"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format18bitRGB666, "COLOR_Format18bitRGB666"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format18bitARGB1665, "COLOR_Format18bitARGB1665"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format19bitARGB1666, 
"COLOR_Format19bitARGB1666"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format24bitRGB888, "COLOR_Format24bitRGB888"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format24bitBGR888, "COLOR_Format24bitBGR888"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format24bitARGB1887, "COLOR_Format24bitARGB1887"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format25bitARGB1888, "COLOR_Format25bitARGB1888"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format32bitBGRA8888, "COLOR_Format32bitBGRA8888"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format32bitARGB8888, "COLOR_Format32bitARGB8888"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411Planar, "COLOR_FormatYUV411Planar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411PackedPlanar, "COLOR_FormatYUV411PackedPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, "COLOR_FormatYUV420Planar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar, "COLOR_FormatYUV420PackedPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, "COLOR_FormatYUV420SemiPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422Planar, "COLOR_FormatYUV422Planar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedPlanar, "COLOR_FormatYUV422PackedPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422SemiPlanar, "COLOR_FormatYUV422SemiPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYCbYCr, "COLOR_FormatYCbYCr"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYCrYCb, "COLOR_FormatYCrYCb"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatCbYCrY, "COLOR_FormatCbYCrY"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatCrYCbY, "COLOR_FormatCrYCbY"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV444Interleaved, "COLOR_FormatYUV444Interleaved"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer8bit, 
"COLOR_FormatRawBayer8bit"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer10bit, "COLOR_FormatRawBayer10bit"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatRawBayer8bitcompressed, "COLOR_FormatRawBayer8bitcompressed"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL2, "COLOR_FormatL2"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL4, "COLOR_FormatL4"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL8, "COLOR_FormatL8"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL16, "COLOR_FormatL16"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL24, "COLOR_FormatL24"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatL32, "COLOR_FormatL32"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar, "COLOR_FormatYUV420PackedSemiPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedSemiPlanar, "COLOR_FormatYUV422PackedSemiPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format18BitBGR666, "COLOR_Format18BitBGR666"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format24BitARGB6666, "COLOR_Format24BitARGB6666"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_Format24BitABGR6666, "COLOR_Format24BitABGR6666"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar, "COLOR_TI_FormatYUV420PackedSemiPlanar"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface, "COLOR_FormatSurface"); + aMap.put(MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, "COLOR_QCOM_FormatYUV420SemiPlanar"); + colorFormatNamesMap = Collections.unmodifiableMap(aMap); + } + + public static final List<Integer> acceptableColorSpaceList; + + static { + List<Integer> list = new ArrayList<Integer>(); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411Planar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV411PackedPlanar); + 
list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422Planar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422SemiPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYCbYCr); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYCrYCb); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatCbYCrY); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatCrYCbY); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV444Interleaved); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV422PackedSemiPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar); + list.add(MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar); + acceptableColorSpaceList = Collections.unmodifiableList(list); + } + + public static int selectFirstVideoAvcColorFormat() throws IllegalArgumentException { + return selectFirstColorFormat(CodecInfoUtils.selectFirstVideoAvcCodec().getCapabilitiesForType(MIME_TYPE)); + } + + public static int selectFirstColorFormat(MediaCodecInfo.CodecCapabilities capabilities) throws IllegalArgumentException { + ALog.d("Start color format selecting"); + if (capabilities.colorFormats == null || capabilities.colorFormats.length == 0) { + ALog.e("Color format not found"); + throw new IllegalArgumentException("Unable to get color formats"); + } + for (int i = 0; i < capabilities.colorFormats.length; i++) { + int colorFormat = capabilities.colorFormats[i]; + if (acceptableColorSpaceList.contains(colorFormat)) { + ALog.d("Color format " + getColorSpaceName(colorFormat) + " selected"); + return 
colorFormat; + } + } + ALog.e("color format was not found in range" + acceptableColorSpaceList.toString()); + throw new IllegalArgumentException("color format was not found in range" + acceptableColorSpaceList.toString()); + } + + public static Map<String, Integer> getColorFormatList(MediaCodecInfo.CodecCapabilities capabilities) { + Map<String, Integer> coorFormatList = new HashMap<String, Integer>(); + for (int i = 0; i < capabilities.colorFormats.length; i++) { + int colorFormat = capabilities.colorFormats[i]; + coorFormatList.put(getColorSpaceName(colorFormat), colorFormat); + } + return coorFormatList; + } + + public static String getColorSpaceName(int colorFormat) { + if (colorFormatNamesMap.containsKey(colorFormat) == false) { + return "Unknown"; + } else { + return colorFormatNamesMap.get(colorFormat); + } + + } + +}
\ No newline at end of file diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/EncoderMediaFormatSetting.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/EncoderMediaFormatSetting.java new file mode 100644 index 000000000..59b817300 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/EncoderMediaFormatSetting.java @@ -0,0 +1,49 @@ +package com.batutin.android.androidvideostreaming.media; + +public class EncoderMediaFormatSetting { + private final int videoFrameWidth; + private final int videoFrameHeight; + private final int videoBitRate; + private final int videoFrameRate; + private final int colorFormat; + private final int frameRate; + private final String mimeType; + + public EncoderMediaFormatSetting(int videoFrameWidth, int videoFrameHeight, int videoBitRate, int videoFrameRate, int colorFormat, int frameRate, String mimeType) { + this.videoFrameWidth = videoFrameWidth; + this.videoFrameHeight = videoFrameHeight; + this.videoBitRate = videoBitRate; + this.videoFrameRate = videoFrameRate; + this.colorFormat = colorFormat; + this.frameRate = frameRate; + this.mimeType = mimeType; + } + + public int getVideoFrameWidth() { + return videoFrameWidth; + } + + public int getVideoFrameHeight() { + return videoFrameHeight; + } + + public int getVideoBitRate() { + return videoBitRate; + } + + public int getVideoFrameRate() { + return videoFrameRate; + } + + public int getColorFormat() { + return colorFormat; + } + + public int getFrameRate() { + return frameRate; + } + + public String getMimeType() { + return mimeType; + } +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaCoderState.java 
b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaCoderState.java new file mode 100644 index 000000000..e0f6a420c --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaCoderState.java @@ -0,0 +1,11 @@ +package com.batutin.android.androidvideostreaming.media; + +/** + * Created by Andrew Batutin on 8/12/13. + */ +public interface MediaCoderState { + + public void start() throws IllegalStateException; + + public void stop() throws IllegalStateException; +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaDecoder.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaDecoder.java new file mode 100644 index 000000000..3480899a2 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaDecoder.java @@ -0,0 +1,85 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.MediaCodec; +import android.media.MediaFormat; +import android.view.Surface; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.nio.ByteBuffer; + +/** + * Created by Andrew Batutin on 8/12/13. 
+ */ +public class MediaDecoder extends AbstractMediaCoder { + + + public MediaCodec getDecoder() { + return codec; + } + + public MediaDecoder() { + codec = createMediaDecoder(); + } + + + private MediaCodec createMediaDecoder() { + ALog.d("Start decoder creation"); + MediaCodec codec = createMediaCodec(); + ALog.d("End decoder creation"); + return codec; + } + + @Override + protected MediaCodec createMediaCodec() { + MediaCodec codec = MediaCodec.createDecoderByType(MIME_TYPE); + return codec; + } + + public void configureMediaDecoder(MediaFormat format, Surface surface) { + ALog.d("Start Decoder configure"); + mediaFormat = format; + codec.configure(getMediaFormat(), surface, null, 0); + isConfigured = true; + ALog.d("End Decoder configure"); + } + + @Override + public void start() throws IllegalStateException { + if (isRunning == false) { + ALog.d("Decoder is going to start"); + } else { + ALog.d("Decoder is already started"); + } + super.start(); + if (isRunning == true) { + ALog.d("Decoder is started"); + } else { + ALog.d("Decoder is stopped"); + } + } + + @Override + public void stop() throws IllegalStateException { + if (isRunning == true) { + ALog.d("Decoder is going to stop"); + } else { + ALog.d("Decoder is already stopped"); + } + super.stop(); + if (isRunning == false) { + ALog.d("Decoder is stopped"); + } else { + ALog.d("Decoder is running"); + } + } + + public void queueEncodedData(MediaCodec.BufferInfo info, ByteBuffer encodedData, VideoAvcCoder videoAvcCoder) { + int inputBufIndex = videoAvcCoder.getMediaDecoder().getDecoder().dequeueInputBuffer(-1); + ByteBuffer inputBuf = videoAvcCoder.getMediaDecoder().getDecoder().getInputBuffers()[inputBufIndex]; + inputBuf.clear(); + inputBuf.put(encodedData); + videoAvcCoder.getMediaDecoder().getDecoder().queueInputBuffer(inputBufIndex, 0, info.size, + info.presentationTimeUs, info.flags); + } +} diff --git 
a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaEncoder.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaEncoder.java new file mode 100644 index 000000000..8b992b25d --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaEncoder.java @@ -0,0 +1,125 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.MediaCodec; +import android.media.MediaFormat; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; + +/** + * Created by Andrew Batutin on 8/9/13. + */ +public class MediaEncoder extends AbstractMediaCoder implements MediaCoderState { + + public static final int FRAME_RATE = 10; + + public MediaEncoder() { + codec = createMediaEncoder(); + } + + public MediaCodec getEncoder() { + return codec; + } + + private MediaCodec createMediaEncoder() { + ALog.d("Start codec creation"); + MediaCodec codec = createMediaCodec(); + ALog.d("End codec creation"); + return codec; + } + + @Override + protected MediaCodec createMediaCodec() { + MediaCodec codec = MediaCodec.createEncoderByType(MIME_TYPE); + return codec; + } + + @Override + public void start() throws IllegalStateException { + if (isRunning == false) { + ALog.d("Encoder is going to start"); + } else { + ALog.d("Encoder is already started"); + } + super.start(); + if (isRunning == true) { + ALog.d("Encoder is started"); + } else { + ALog.d("Encoder is stopped"); + } + } + + @Override + public void stop() throws IllegalStateException { + if (isRunning == true) { + ALog.d("Encoder is going to stop"); + } else { + ALog.d("Encoder is already stopped"); + } + super.stop(); + if (isRunning == false) { + ALog.d("Encoder is stopped"); + } else { + 
ALog.d("Encoder is running"); + } + } + + public void configureMediaEncoder(MediaFormat format) { + ALog.d("Start encoder configure"); + mediaFormat = format; + codec.configure(getMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); + isConfigured = true; + ALog.d("End encoder configure"); + } + + public byte[] getDataToEncode(InputStream reader) { + ByteArrayOutputStream bb = new ByteArrayOutputStream(); + int res = 0; + do { + try { + res = reader.read(); + if (res != -1) { + bb.write(res); + } + } catch (IOException e) { + ALog.e(e.getMessage()); + } + } + while (res != -1 && bb.size() < frameSize()); + + try { + bb.flush(); + } catch (IOException e) { + ALog.e(e.getMessage()); + } + return bb.toByteArray(); + } + + // The size of a frame of video data, in the formats we handle, is stride*sliceHeight + // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels. Application + // of algebra and assuming that stride==width and sliceHeight==height yields: + public int frameSize() { + return getMediaFormat().getInteger(MediaFormat.KEY_WIDTH) * getMediaFormat().getInteger(MediaFormat.KEY_HEIGHT) * 3 / 2; + } + + public byte[] enqueueFrame(int inputBufIndex, long presentationTimeUs, InputStream reader) { + ByteBuffer encoderInputBuffer = getEncoder().getInputBuffers()[inputBufIndex]; + encoderInputBuffer.clear(); + byte[] dataToEncode = getDataToEncode(reader); + encoderInputBuffer.put(dataToEncode, 0, dataToEncode.length); + getEncoder().queueInputBuffer(inputBufIndex, 0, frameSize(), presentationTimeUs, 0); + return dataToEncode; + } + + // Send an empty frame with the end-of-stream flag set. If we set EOS + // on a frame with data, that frame data will be ignored, and the + // output will be short one frame. 
+ public void enqueueEndOfStreamFrame(int inputBufIndex, long presentationTimeUs) { + getEncoder().queueInputBuffer(inputBufIndex, 0, 0, presentationTimeUs, + MediaCodec.BUFFER_FLAG_END_OF_STREAM); + } +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaFormatUtils.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaFormatUtils.java new file mode 100644 index 000000000..65f83f7f1 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/MediaFormatUtils.java @@ -0,0 +1,55 @@ +package com.batutin.android.androidvideostreaming.media; + +import android.media.CamcorderProfile; +import android.media.MediaFormat; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.nio.ByteBuffer; + +public class MediaFormatUtils { + + public static final String MIME_TYPE = "video/avc"; + + public static MediaFormat createVideoAvcEncoderMediaFormat(CamcorderProfile camcorderProfile, int colorFormat, int frameRate) { + MediaFormat mediaFormat = createEncoderMediaFormat(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight, camcorderProfile.videoBitRate, camcorderProfile.videoFrameRate, colorFormat, frameRate, MIME_TYPE); + return mediaFormat; + } + + public static MediaFormat createEncoderMediaFormat(CamcorderProfile camcorderProfile, int colorFormat, int frameRate, String mimeType) { + MediaFormat mediaFormat = createEncoderMediaFormat(camcorderProfile.videoFrameWidth, camcorderProfile.videoFrameHeight, camcorderProfile.videoBitRate, camcorderProfile.videoFrameRate, colorFormat, frameRate, mimeType); + return mediaFormat; + } + + public static MediaFormat createEncoderMediaFormat(int videoFrameWidth, int videoFrameHeight, int videoBitRate, int videoFrameRate, int colorFormat, int frameRate, String mimeType) { + ALog.d("Start MediaFormat 
creation"); + MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType, videoFrameWidth, videoFrameHeight); + mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, videoBitRate); + mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, videoFrameRate); + mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); + mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, frameRate); + ALog.d("MediaFormat " + mediaFormat.toString() + " created"); + return mediaFormat; + } + + public static MediaFormat createEncoderMediaFormat(EncoderMediaFormatSetting encoderMediaFormatSetting) { + MediaFormat mediaFormat = createEncoderMediaFormat(encoderMediaFormatSetting.getVideoFrameWidth(), encoderMediaFormatSetting.getVideoFrameHeight(), encoderMediaFormatSetting.getVideoBitRate(), encoderMediaFormatSetting.getVideoFrameRate(), encoderMediaFormatSetting.getColorFormat(), encoderMediaFormatSetting.getFrameRate(), encoderMediaFormatSetting.getMimeType()); + return mediaFormat; + } + + public static MediaFormat createDecoderMediaFormat(String mimeType, int videoFrameWidth, int videoFrameHeight, ByteBuffer csd0) { + MediaFormat format = + MediaFormat.createVideoFormat(mimeType, videoFrameWidth, videoFrameHeight); + format.setByteBuffer("csd-0", csd0); + return format; + } + + public static MediaFormat createVideoAvcDecoderMediaFormat(int videoFrameWidth, int videoFrameHeight, ByteBuffer csd0) { + MediaFormat format = + MediaFormat.createVideoFormat(MIME_TYPE, videoFrameWidth, videoFrameHeight); + format.setByteBuffer("csd-0", csd0); + return format; + } + + +}
/**
 * Computes per-frame presentation timestamps (in microseconds) for a fixed
 * frame rate.
 */
public class PresentationTimeCalc {

    /** Microseconds in one second. Held as long so timestamp math never overflows. */
    private static final long MICROS_PER_SECOND = 1000000L;

    private final int frameRate;

    /**
     * @param frameRate frames per second; must be at least 1
     * @throws IllegalArgumentException if {@code frameRate < 1}
     */
    public PresentationTimeCalc(int frameRate) {
        if (frameRate < 1) {
            throw new IllegalArgumentException("frame rate must be >= 1");
        }
        this.frameRate = frameRate;
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     * Negative frame indices are clamped to 0.
     */
    public long computePresentationTime(int frameIndex) {
        if (frameIndex < 0) return 0;
        // BUG FIX: multiply in long arithmetic. The original expression
        // (frameIndex * 1000000 / frameRate) performed the multiplication in
        // int, overflowing for frameIndex >= 2148 and yielding negative
        // timestamps, before widening the result to long.
        return frameIndex * MICROS_PER_SECOND / frameRate;
    }
}
/**
 * Drives a full H.264/AVC round trip: raw frames are read from an
 * InputStream, encoded by a MediaEncoder, and the encoded packets are fed
 * straight into a MediaDecoder that renders onto a Surface. Optional
 * listeners are notified at every lifecycle and data-flow transition.
 */
public class VideoAvcCoder {

    private VideoAvcCoderStateListener stateListener;
    private VideoAvcCoderDataStreamListener streamListener;
    private PresentationTimeCalc presentationTimeCalc;
    // Source of raw frame bytes; closed by stop().
    private InputStream reader;
    // Set via shouldStop(); polled inside the encode loop.
    // NOTE(review): written under synchronization in shouldStop() but read
    // without it in doEncodeDecodeVideoFromBuffer() — consider volatile so the
    // loop thread reliably observes the stop request.
    private boolean stop = false;
    private MediaEncoder mediaEncoder;
    private MediaDecoder mediaDecoder;
    // Target surface the decoder renders into.
    private Surface surface;

    private VideoAvcCoder() {
    }

    /** Factory: coder configured from CamcorderProfile.QUALITY_LOW. */
    public static VideoAvcCoder createLowQualityVideoAvcCoder(Surface surface, InputStream reader) {
        return new VideoAvcCoder(surface, reader, CamcorderProfile.QUALITY_LOW);
    }

    /** Factory: coder configured from CamcorderProfile.QUALITY_HIGH. */
    public static VideoAvcCoder createHighQualityVideoAvcCoder(Surface surface, InputStream reader) {
        return new VideoAvcCoder(surface, reader, CamcorderProfile.QUALITY_HIGH);
    }

    public VideoAvcCoderStateListener getStateListener() {
        return stateListener;
    }

    public void setStateListener(VideoAvcCoderStateListener stateListener) {
        this.stateListener = stateListener;
    }

    public VideoAvcCoderDataStreamListener getStreamListener() {
        return streamListener;
    }

    public void setStreamListener(VideoAvcCoderDataStreamListener streamListener) {
        this.streamListener = streamListener;
    }

    public InputStream getReader() {
        return reader;
    }

    public MediaEncoder getMediaEncoder() {
        return mediaEncoder;
    }

    public MediaDecoder getMediaDecoder() {
        return mediaDecoder;
    }

    /**
     * @param quality one of the CamcorderProfile.QUALITY_* constants
     */
    public VideoAvcCoder(Surface surface, InputStream reader, int quality) {
        initCoder(surface, reader, quality);
    }

    // Builds the encoder/decoder pair. Only the encoder is configured here;
    // the decoder is configured lazily once the first codec-config packet
    // (SPS/PPS) arrives in doEncodeDecodeVideoFromBuffer().
    private void initCoder(Surface surface, InputStream reader, int quality) {
        this.surface = surface;
        this.reader = reader;
        MediaFormat mediaFormat = createEncoderParameters(quality);
        mediaDecoder = new MediaDecoder();
        mediaEncoder = new MediaEncoder();
        mediaEncoder.configureMediaEncoder(mediaFormat);
        presentationTimeCalc = new PresentationTimeCalc(MediaEncoder.FRAME_RATE);
    }

    // Derives the encoder MediaFormat from the first camera's camcorder
    // profile and the first supported AVC color format.
    protected MediaFormat createEncoderParameters(int quality) {
        CamcorderProfile camcorderProfile = CamcorderProfileUtils.getFirstCameraCamcorderProfile(quality);
        int colorFormat = ColorFormatUtils.selectFirstVideoAvcColorFormat();
        MediaFormat mediaFormat = MediaFormatUtils.createVideoAvcEncoderMediaFormat(camcorderProfile, colorFormat, MediaEncoder.FRAME_RATE);
        return mediaFormat;
    }

    /**
     * Starts the encoder (the decoder starts later, on first config packet)
     * and notifies the state listener around the transition.
     */
    public void start() throws IllegalStateException {
        if (stateListener != null) {
            stateListener.videoAvcCoderShouldStart(this);
        }
        try {
            mediaEncoder.start();
        } catch (IllegalStateException exp) {
            ALog.e(exp.getMessage());
        }
        if (stateListener != null) {
            stateListener.videoAvcCoderStarted(this);
        }
    }

    /**
     * Closes the input stream and stops both codecs, swallowing (but logging)
     * failures so all listeners are still notified of the shutdown.
     */
    public synchronized void stop() throws IllegalStateException {
        try {
            reader.close();
            mediaEncoder.stop();
            mediaDecoder.stop();
        } catch (IllegalStateException exp) {
            ALog.e(exp.getMessage());
        } catch (IOException e) {
            ALog.e(e.getMessage());
        }
        if (stateListener != null) {
            stateListener.videoAvcCoderStopped(this);
        }
        if (streamListener != null) {
            streamListener.dataDecodingStopped(this);
        }

        if (streamListener != null) {
            streamListener.dataEncodingStopped(this);
        }
    }

    /**
     * Requests a graceful stop: the encode loop will submit an EOS frame and
     * wind down once the decoder drains.
     */
    public synchronized void shouldStop() {
        if (stateListener != null) {
            stateListener.videoAvcCoderShouldStop(this);
        }
        this.stop = true;
    }

    @Override
    public String toString() {
        String message = " " + mediaDecoder.toString() + " " + mediaEncoder.toString();
        return super.toString() + message;
    }

    /**
     * Main blocking pipeline loop: feeds raw frames to the encoder, shuttles
     * encoded packets to the decoder, and releases decoded buffers to the
     * Surface, until the decoder reports end-of-stream. Calls stop() on exit.
     * Intended to run on a dedicated thread (see PlayerThread).
     */
    public void doEncodeDecodeVideoFromBuffer() {

        final int TIMEOUT_USEC = 10000;
        if (streamListener != null) {
            streamListener.dataEncodingShouldStart(this);
        }

        // NOTE(review): encoderInputBuffers, decoderInputBuffers and
        // decoderConfigured are never used below; mediaDecoder.isConfigured()
        // is consulted instead of the local flag.
        ByteBuffer[] encoderInputBuffers = mediaEncoder.getEncoder().getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = mediaEncoder.getEncoder().getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int generateIndex = 0;
        // NOTE(review): checkIndex is never incremented, so the summary log
        // below always reports "decoded 0 frames".
        int checkIndex = 0;

        boolean decoderConfigured = false;

        // Just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;


        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        if (streamListener != null) {
            streamListener.dataEncodingStarted(this);
        }
        while (!outputDone) {
            ALog.v("loop");

            // If we're not done submitting frames, generate a new one and submit it. By
            // doing this on every loop we're working to ensure that the encoder always has
            // work to do.
            //
            // We don't really want a timeout here, but sometimes there's a delay opening
            // the encoder device, so a short timeout can keep us from spinning hard.
            if (!inputDone) {
                // NOTE(review): timeout -1 blocks indefinitely, contradicting
                // the "short timeout" comment above; the "not available"
                // branch below is effectively unreachable.
                int inputBufIndex = mediaEncoder.getEncoder().dequeueInputBuffer(-1);
                ALog.v("inputBufIndex=" + inputBufIndex);
                if (inputBufIndex >= 0) {
                    long presentationTimeUs = presentationTimeCalc.computePresentationTime(generateIndex);
                    if (stop == true) {
                        if (streamListener != null) {
                            streamListener.dataEncodingShouldStop(this);
                        }
                        mediaEncoder.enqueueEndOfStreamFrame(inputBufIndex, presentationTimeUs);
                        inputDone = true;
                        ALog.i("sent input EOS (with zero-length frame)");
                    } else {
                        byte[] frame = mediaEncoder.enqueueFrame(inputBufIndex, presentationTimeUs, reader);
                        if (streamListener != null) {
                            streamListener.frameShouldBeEncoded(this, frame);
                        }
                        ALog.v("submitted frame " + generateIndex + " to enc");
                    }
                    generateIndex++;
                } else {
                    // either all in use, or we timed out during initial setup
                    ALog.i("input buffer not available");
                }
            }

            // Check for output from the encoder. If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic. We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // Once we get EOS from the encoder, we don't need to do this anymore.
            if (!encoderDone) {
                // NOTE(review): blocks forever (-1) rather than using
                // TIMEOUT_USEC like the decoder path below.
                int encoderStatus = mediaEncoder.getEncoder().dequeueOutputBuffer(info, -1);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    ALog.i("no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = mediaEncoder.getEncoder().getOutputBuffers();
                    ALog.i("encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // not expected for an encoder
                    MediaFormat newFormat = mediaEncoder.getEncoder().getOutputFormat();
                    ALog.i("encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    ALog.e("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        ALog.i("encoderOutputBuffer " + encoderStatus + " was null");
                    }
                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedSize = matchBufferInfo(info, encodedSize, encodedData);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // Codec config info. Only expected on first packet. One way to
                        // handle this is to manually stuff the data into the MediaFormat
                        // and pass that to configure(). We do that here to exercise the API.
                        if (streamListener != null) {
                            streamListener.settingsDataReceived(this, encodedData);
                        }
                        if (streamListener != null) {
                            streamListener.dataDecodingShouldStart(this);
                        }
                        startDecoderWithCodecInfo(encodedData);
                        ALog.i("decoder configured (" + info.size + " bytes) and started");
                        if (streamListener != null) {
                            streamListener.dataDecodingStarted(this);
                        }
                    } else {
                        if (streamListener != null) {
                            streamListener.frameWasEncoded(this, encodedData);
                        }
                        // Get a decoder input buffer, blocking until it's available.
                        mediaDecoder.queueEncodedData(info, encodedData, this);
                        if (streamListener != null) {
                            streamListener.frameShouldBeDecoded(this, encodedData);
                        }
                        encoderDone = isEncoderDone(info);
                        ALog.v("passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));
                    }
                    mediaEncoder.getEncoder().releaseOutputBuffer(encoderStatus, false);
                }
            }

            // Check for output from the decoder. We want to do this on every loop to avoid
            // the possibility of stalling the pipeline. We use a short timeout to avoid
            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
            //
            // If we're decoding to a Surface, we'll get notified here as usual but the
            // ByteBuffer references will be null. The data is sent to Surface instead.
            if (mediaDecoder.isConfigured()) {
                int decoderStatus = mediaDecoder.getDecoder().dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    ALog.i("no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The storage associated with the direct ByteBuffer may already be unmapped,
                    // so attempting to access data through the old output buffer array could
                    // lead to a native crash.
                    ALog.i("decoder output buffers changed");
                    decoderOutputBuffers = mediaDecoder.getDecoder().getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    decoderOutputFormat = mediaDecoder.getDecoder().getOutputFormat();
                    ALog.i("decoder output format changed: "
                            + decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    ALog.e("unexpected result from deocder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0

                    ALog.v("surface decoder given buffer " + decoderStatus
                            + " (size=" + info.size + ")");
                    rawSize += info.size;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        ALog.i("output EOS");
                        outputDone = true;
                        if (streamListener != null) {
                            streamListener.dataDecodingShouldStop(this);
                        }
                    }
                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    mediaDecoder.getDecoder().releaseOutputBuffer(decoderStatus, true);
                }
            }
        }
        ALog.i("decoded " + checkIndex + " frames at "
                + mediaEncoder.getMediaFormat().getInteger(MediaFormat.KEY_WIDTH) + "x" + mediaEncoder.getMediaFormat().getInteger(MediaFormat.KEY_HEIGHT) + ": raw=" + rawSize + ", enc=" + encodedSize);
        stop();
    }

    // Clamps the ByteBuffer's position/limit to the region described by
    // BufferInfo and accumulates the running encoded-byte total.
    private long matchBufferInfo(MediaCodec.BufferInfo info, long encodedSize, ByteBuffer encodedData) {
        encodedData.position(info.offset);
        encodedData.limit(info.offset + info.size);
        encodedSize += info.size;
        return encodedSize;
    }

    // True when the encoder output packet carries the end-of-stream flag.
    private boolean isEncoderDone(MediaCodec.BufferInfo info) {
        boolean encoderDone;
        encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        return encoderDone;
    }

    // Configures and starts the decoder using the encoder's codec-config
    // packet (csd-0, i.e. SPS/PPS) and the encoder's frame dimensions.
    private void startDecoderWithCodecInfo(ByteBuffer csd0) {
        MediaFormat format = MediaFormatUtils.createVideoAvcDecoderMediaFormat(mediaEncoder.getMediaFormat().getInteger(MediaFormat.KEY_WIDTH), mediaEncoder.getMediaFormat().getInteger(MediaFormat.KEY_HEIGHT), csd0);
        mediaDecoder.configureMediaDecoder(format, surface);
        mediaDecoder.start();
    }
}
\ No newline at end of file diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderDataStreamListener.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderDataStreamListener.java new file mode 100644 index 000000000..327538fa2 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderDataStreamListener.java @@ -0,0 +1,34 @@ +package com.batutin.android.androidvideostreaming.media; + +import java.nio.ByteBuffer; + +/** + * Created by Andrew Batutin on 8/14/13. + */ +public interface VideoAvcCoderDataStreamListener { + + public void dataEncodingShouldStart(VideoAvcCoder videoAvcCoder); + + public void dataEncodingStarted(VideoAvcCoder videoAvcCoder); + + public void frameShouldBeEncoded(VideoAvcCoder videoAvcCoder, byte[] frame); + + public void settingsDataReceived(VideoAvcCoder videoAvcCoder, ByteBuffer settingsData); + + public void frameWasEncoded(VideoAvcCoder videoAvcCoder, ByteBuffer encodedFrame); + + public void dataEncodingShouldStop(VideoAvcCoder videoAvcCoder); + + public void dataEncodingStopped(VideoAvcCoder videoAvcCoder); + + public void dataDecodingShouldStart(VideoAvcCoder videoAvcCoder); + + public void dataDecodingStarted(VideoAvcCoder videoAvcCoder); + + public void frameShouldBeDecoded(VideoAvcCoder videoAvcCoder, ByteBuffer frame); + + public void dataDecodingShouldStop(VideoAvcCoder videoAvcCoder); + + public void dataDecodingStopped(VideoAvcCoder videoAvcCoder); + +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderStateListener.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderStateListener.java new file mode 100644 index 000000000..49ced932b --- 
/dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/media/VideoAvcCoderStateListener.java @@ -0,0 +1,13 @@ +package com.batutin.android.androidvideostreaming.media; + +/** + * Created by Andrew Batutin on 8/14/13. + */ +public interface VideoAvcCoderStateListener { + + public void videoAvcCoderShouldStart(VideoAvcCoder videoAvcCoder); + public void videoAvcCoderStarted (VideoAvcCoder videoAvcCoder); + public void videoAvcCoderShouldStop(VideoAvcCoder videoAvcCoder); + public void videoAvcCoderStopped(VideoAvcCoder videoAvcCoder); + +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/reader/AssetsReader.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/reader/AssetsReader.java new file mode 100644 index 000000000..ff2b7c40a --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/reader/AssetsReader.java @@ -0,0 +1,47 @@ +package com.batutin.android.androidvideostreaming.reader; + +import android.content.Context; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Created by admin on 7/19/13. + */ +public class AssetsReader { + + private Context context; + + + public AssetsReader(Context context) { + if (context == null) + throw new IllegalArgumentException("Be initialised with null context object should not. 
Yeesssssss."); + this.setContext(context); + } + + public Context getContext() { + return context; + } + + public void setContext(Context context) { + this.context = context; + } + + public InputStream readFileFromAssets(String fileName) { + InputStream inputStream = null; + try { + inputStream = getInputStreamForFile(fileName); + } catch (IOException e) { + e.printStackTrace(); + } + return inputStream; + } + + private InputStream getInputStreamForFile(String fileName) throws IOException { + if (this.getContext() == null) + throw new IllegalArgumentException("Context should be initialised before using this method"); + InputStream stream = this.getContext().getAssets().open(fileName); + return stream; + } + +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/utils/ALog.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/utils/ALog.java new file mode 100644 index 000000000..abce4d50f --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/utils/ALog.java @@ -0,0 +1,254 @@ +package com.batutin.android.androidvideostreaming.utils; + + +import android.os.Environment; +import android.util.Log; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.Writer; +import java.net.UnknownHostException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +/** + * Created by Andrew Batutin on 8/9/13. 
/**
 * Static logging facade over android.util.Log that prefixes every message
 * with the calling class, method and line number, and can optionally append
 * each line to a file at <external-storage>/alog/<tag>.log via a background
 * writer thread.
 *
 * Created by Andrew Batutin on 8/9/13.
 */
public class ALog {
    // NOTE(review): SimpleDateFormat is not thread-safe, yet FLOG_FORMAT is
    // shared and format() runs on every caller thread in flog() — concurrent
    // logging can produce corrupted timestamps.
    private static final DateFormat FLOG_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    private static final File LOG_DIR = new File(
            Environment.getExternalStorageDirectory() + File.separator + "alog");
    ;
    // Unbounded queue of fully formatted lines awaiting the writer thread.
    private static final BlockingQueue<String> logQueue = new LinkedBlockingQueue<String>();
    private static boolean fileLogging = false;
    private static String tag = "<tag unset>";
    private static Level level = Level.V;
    // Drains logQueue forever, appending each line to LOG_DIR/<tag>.log.
    // Lines are silently dropped while external storage is unavailable or the
    // log directory cannot be created.
    private static Runnable queueRunner = new Runnable() {
        @Override
        public void run() {
            String line;
            try {
                // take() blocks, so line is never null in practice.
                while ((line = logQueue.take()) != null) {

                    if (!Environment.getExternalStorageState().equals(
                            Environment.MEDIA_MOUNTED)) {
                        continue;
                    }
                    if (!LOG_DIR.exists() && !LOG_DIR.mkdirs()) {
                        continue;
                    }

                    File logFile = new File(LOG_DIR, tag + ".log");
                    Writer w = null;
                    try {
                        w = new FileWriter(logFile, true);
                        w.write(line);
                        // NOTE(review): closed here AND again in finally; the
                        // second close is a harmless no-op for FileWriter.
                        w.close();
                    } catch (IOException e) {
                    } finally {
                        if (w != null) {
                            try {
                                w.close();
                            } catch (IOException e1) {
                            }
                        }
                    }
                }
            } catch (InterruptedException e) {
            }
        }
    };

    static {
        // NOTE(review): non-daemon writer thread started at class load; it
        // lives for the remainder of the process.
        new Thread(queueRunner).start();
    }

    // Captures the caller's class/method/line from the current stack trace.
    // The fixed index 5 assumes a call depth of exactly:
    // getStackTrace -> getContext -> getMessage -> v/d/i/w/e -> caller;
    // adding or removing an intermediate frame breaks the attribution.
    private static LogContext getContext() {
        StackTraceElement[] trace = Thread.currentThread().getStackTrace();
        StackTraceElement element = trace[5]; // frame below us; the caller
        LogContext context = new LogContext(element);
        return context;
    }

    // Formats the user message (printf-style) and prepends caller context as
    // "SimpleClass.method@line: ".
    private static final String getMessage(String s, Object... args) {
        if (s == null) {
            s = "";
        }
        s = String.format(s, args);
        LogContext c = getContext();
        String msg = c.simpleClassName + "." + c.methodName + "@"
                + c.lineNumber + ": " + s;
        return msg;
    }

    // Strips the package prefix from a fully-qualified class name.
    private static String getSimpleClassName(String className) {
        int i = className.lastIndexOf(".");
        if (i == -1) {
            return className;
        }
        return className.substring(i + 1);
    }

    /** Sets the minimum level that will be emitted. */
    public static void setLevel(Level l) {
        level = l;
    }

    /** Sets the logcat tag and the file-log base name. */
    public static void setTag(String t) {
        tag = t;
    }

    /** Enables/disables mirroring log lines to the file queue. */
    public static void setFileLogging(boolean enable) {
        fileLogging = enable;
    }

    /** Verbose-level log; printf-style format and args. */
    public static void v(String format, Object... args) {
        if (level.getValue() > Level.V.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.v(tag, msg);
        if (fileLogging) {
            flog(Level.V, msg);
        }
    }

    /** Debug-level log; printf-style format and args. */
    public static void d(String format, Object... args) {
        if (level.getValue() > Level.D.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.d(tag, msg);
        if (fileLogging) {
            flog(Level.D, msg);
        }
    }

    /** Info-level log; printf-style format and args. */
    public static void i(String format, Object... args) {
        if (level.getValue() > Level.I.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.i(tag, msg);
        if (fileLogging) {
            flog(Level.I, msg);
        }
    }

    /** Warning-level log; printf-style format and args. */
    public static void w(String format, Object... args) {
        if (level.getValue() > Level.W.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.w(tag, msg);
        if (fileLogging) {
            flog(Level.W, msg);
        }
    }

    /** Warning-level log with an attached throwable. */
    public static void w(String format, Throwable t, Object... args) {
        if (level.getValue() > Level.W.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.w(tag, msg, t);
        if (fileLogging) {
            flog(Level.W, msg, t);
        }
    }

    /** Error-level log; printf-style format and args. */
    public static void e(String format, Object... args) {
        if (level.getValue() > Level.E.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.e(tag, msg);
        if (fileLogging) {
            flog(Level.E, msg);
        }
    }

    /** Error-level log with an attached throwable. */
    public static void e(String format, Throwable t, Object... args) {
        if (level.getValue() > Level.E.getValue()) {
            return;
        }
        String msg = getMessage(format, args);
        Log.e(tag, msg, t);
        if (fileLogging) {
            flog(Level.E, msg, t);
        }
    }

    /** Logs the current stack trace at error level. */
    public static void trace() {
        try {
            throw new Throwable("dumping stack trace ...");
        } catch (Throwable t) {
            ALog.e("trace:", t);
        }
    }

    /**
     * Renders a throwable's stack trace to a string; returns "" for null or
     * when the cause chain contains an UnknownHostException (mirrors
     * android.util.Log.getStackTraceString behavior).
     */
    public static String getStackTraceString(Throwable tr) {
        if (tr == null) {
            return "";
        }

        Throwable t = tr;
        while (t != null) {
            if (t instanceof UnknownHostException) {
                return "";
            }
            t = t.getCause();
        }

        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        tr.printStackTrace(pw);
        return sw.toString();
    }

    private static void flog(Level l, String msg) {
        flog(l, msg, null);
    }

    // Formats a timestamped file-log line (runs on the caller's thread) and
    // hands it to the writer thread via the queue.
    private static void flog(Level l, String msg, Throwable t) {
        String timeString = FLOG_FORMAT.format(new Date());
        String line = timeString + " " + l.toString() + "/" + tag + ": " + msg
                + "\n";
        if (t != null) {
            line += getStackTraceString(t) + "\n";
        }
        logQueue.offer(line);
    }

    /** Log severity, ordered V < D < I < W < E. */
    public enum Level {
        V(1), D(2), I(3), W(4), E(5);
        private int value;

        private Level(int value) {
            this.value = value;
        }

        int getValue() {
            return value;
        }
    }

    // Immutable snapshot of a caller's location, built from a stack frame.
    private static class LogContext {
        // String className;
        String simpleClassName;
        String methodName;
        int lineNumber;

        LogContext(StackTraceElement element) {
            // this.className = element.getClassName();
            this.simpleClassName = getSimpleClassName(element.getClassName());
            this.methodName = element.getMethodName();
            this.lineNumber = element.getLineNumber();
        }
    }
}
/**
 * Miscellaneous networking helpers: hex/UTF-8 conversion, file loading,
 * and MAC/IP address discovery via NetworkInterface.
 *
 * Created by Andrew Batutin on 8/14/13.
 */
public class NetworkUtils {

    /**
     * Convert byte array to hex string
     *
     * @param bytes bytes to render
     * @return upper-case hex, two characters per byte, no separators
     */
    public static String bytesToHex(byte[] bytes) {
        StringBuilder sbuf = new StringBuilder();
        for (int idx = 0; idx < bytes.length; idx++) {
            int intVal = bytes[idx] & 0xff;
            if (intVal < 0x10) sbuf.append("0");
            sbuf.append(Integer.toHexString(intVal).toUpperCase());
        }
        return sbuf.toString();
    }

    /**
     * Get utf8 byte array.
     *
     * @param str string to encode
     * @return array or NULL if error was found
     */
    public static byte[] getUTF8Bytes(String str) {
        try {
            return str.getBytes("UTF-8");
        } catch (Exception ex) {
            return null;
        }
    }

    /**
     * Load UTF8withBOM or any ansi text file.
     *
     * @param filename path of the file to read
     * @return file contents; a leading UTF-8 BOM is stripped
     * @throws java.io.IOException on read failure
     */
    public static String loadFileAsString(String filename) throws java.io.IOException {
        final int BUFLEN = 1024;
        BufferedInputStream is = new BufferedInputStream(new FileInputStream(filename), BUFLEN);
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream(BUFLEN);
            byte[] bytes = new byte[BUFLEN];
            boolean isUTF8 = false;
            int read, count = 0;
            while ((read = is.read(bytes)) != -1) {
                // NOTE(review): BOM detection inspects bytes[1]/bytes[2] even
                // when the first read returned fewer than 3 bytes; harmless in
                // practice (array is zero-filled) but worth tightening.
                if (count == 0 && bytes[0] == (byte) 0xEF && bytes[1] == (byte) 0xBB && bytes[2] == (byte) 0xBF) {
                    isUTF8 = true;
                    baos.write(bytes, 3, read - 3); // drop UTF8 bom marker
                } else {
                    baos.write(bytes, 0, read);
                }
                count += read;
            }
            // Non-BOM files are decoded with the platform default charset.
            return isUTF8 ? new String(baos.toByteArray(), "UTF-8") : new String(baos.toByteArray());
        } finally {
            try {
                is.close();
            } catch (Exception ex) {
            }
        }
    }

    /**
     * Returns MAC address of the given interface name.
     *
     * @param interfaceName eth0, wlan0 or NULL=use first interface
     * @return mac address or empty string
     */
    public static String getMACAddress(String interfaceName) {
        try {
            List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
            for (NetworkInterface intf : interfaces) {
                if (interfaceName != null) {
                    if (!intf.getName().equalsIgnoreCase(interfaceName)) continue;
                }
                byte[] mac = intf.getHardwareAddress();
                if (mac == null) return "";
                StringBuilder buf = new StringBuilder();
                for (int idx = 0; idx < mac.length; idx++)
                    buf.append(String.format("%02X:", mac[idx]));
                if (buf.length() > 0) buf.deleteCharAt(buf.length() - 1);
                return buf.toString();
            }
        } catch (Exception ex) {
        } // for now eat exceptions
        return "";
        /*try {
            // this is so Linux hack
            return loadFileAsString("/sys/class/net/" +interfaceName + "/address").toUpperCase().trim();
        } catch (IOException ex) {
            return null;
        }*/
    }

    /**
     * Get IP address from first non-localhost interface
     *
     * NOTE(review): relies on org.apache.http.conn.util.InetAddressUtils,
     * which is deprecated/removed on modern Android — confirm availability.
     *
     * @param useIPv4 true=return ipv4, false=return ipv6
     * @return address or empty string
     */
    public static String getIPAddress(boolean useIPv4) {
        try {
            List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
            for (NetworkInterface intf : interfaces) {
                List<InetAddress> addrs = Collections.list(intf.getInetAddresses());
                for (InetAddress addr : addrs) {
                    if (!addr.isLoopbackAddress()) {
                        String sAddr = addr.getHostAddress().toUpperCase();
                        boolean isIPv4 = InetAddressUtils.isIPv4Address(sAddr);
                        if (useIPv4) {
                            if (isIPv4)
                                return sAddr;
                        } else {
                            if (!isIPv4) {
                                int delim = sAddr.indexOf('%'); // drop ip6 port suffix
                                return delim < 0 ? sAddr : sAddr.substring(0, delim);
                            }
                        }
                    }
                }
            }
        } catch (Exception ex) {
        } // for now eat exceptions
        return "";
    }
}
sAddr : sAddr.substring(0, delim); + } + } + } + } + } + } catch (Exception ex) { + } // for now eat exceptions + return ""; + } +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/BitmapGeneratorThread.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/BitmapGeneratorThread.java new file mode 100644 index 000000000..5c7b362b7 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/BitmapGeneratorThread.java @@ -0,0 +1,57 @@ +package com.batutin.android.androidvideostreaming.videopreview; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.io.OutputStream; + +/** + * Created by Andrew Batutin on 8/27/13. + */ +public class BitmapGeneratorThread extends VideoPreviewThread { + + private PlayerThreadState threadState; + private FrameDataSource dataSource; + private OutputStream outputStream; + + private boolean stop = false; + + public BitmapGeneratorThread(OutputStream outputStream, PlayerThreadState threadState, FrameDataSource dataSource) { + super(); + this.threadState = threadState; + this.dataSource = dataSource; + this.outputStream = outputStream; + } + + public synchronized void shouldStop() { + this.stop = true; + } + + public synchronized OutputStream getOutputStream() { + return outputStream; + } + + @Override + public void run() { + try { + while (!stop) { + outputStream.write(dataSource.createVideoFrame()); + outputStream.flush(); + } + outputStream.close(); + threadState.threadShouldStop(this); + } catch (Exception e) { + ALog.e(e.getMessage()); + } + } + + @Override + protected void defaultExceptionHandler(Thread paramThread, Throwable paramThrowable) { + logUncaughtException(paramThread, paramThrowable); + try { + shouldStop(); + } catch (IllegalStateException e) { + ALog.e(e.getMessage()); + } + 
interrupt(); + } +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/FrameDataSource.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/FrameDataSource.java new file mode 100644 index 000000000..7ba8f2447 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/FrameDataSource.java @@ -0,0 +1,9 @@ +package com.batutin.android.androidvideostreaming.videopreview; + +/** + * Created by Andrew Batutin on 8/27/13. + */ +public interface FrameDataSource { + + public byte [] createVideoFrame(); +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThread.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThread.java new file mode 100644 index 000000000..38243caf5 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThread.java @@ -0,0 +1,58 @@ +package com.batutin.android.androidvideostreaming.videopreview; + +import android.view.Surface; + +import com.batutin.android.androidvideostreaming.media.VideoAvcCoder; +import com.batutin.android.androidvideostreaming.media.VideoAvcCoderDataStreamListener; +import com.batutin.android.androidvideostreaming.utils.ALog; + +import java.io.PipedInputStream; + +/** + * Created by Andrew Batutin on 8/27/13. 
+ */ +public class PlayerThread extends VideoPreviewThread { + + private VideoAvcCoder videoAvcCoder; + private VideoAvcCoderDataStreamListener dataStreamListener; + + public PlayerThread(VideoAvcCoderDataStreamListener dataStreamListener, Surface surface, PipedInputStream pipedReader) { + super(); + this.dataStreamListener = dataStreamListener; + videoAvcCoder = VideoAvcCoder.createLowQualityVideoAvcCoder(surface, pipedReader); + videoAvcCoder.setStreamListener(dataStreamListener); + videoAvcCoder.start(); + } + + public VideoAvcCoderDataStreamListener getDataStreamListener() { + return dataStreamListener; + } + + public VideoAvcCoder getVideoAvcCoder() { + return videoAvcCoder; + } + + @Override + public String toString() { + String message = videoAvcCoder.toString(); + return super.toString() + message; + } + + @Override + public void run() { + videoAvcCoder.doEncodeDecodeVideoFromBuffer(); + } + + @Override + protected void defaultExceptionHandler(Thread paramThread, Throwable paramThrowable) { + logUncaughtException(paramThread, paramThrowable); + try { + videoAvcCoder.stop(); + } catch (IllegalStateException e) { + ALog.e(e.getMessage()); + } + interrupt(); + } +} + + diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThreadState.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThreadState.java new file mode 100644 index 000000000..ff140d976 --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/PlayerThreadState.java @@ -0,0 +1,9 @@ +package com.batutin.android.androidvideostreaming.videopreview; + +/** + * Created by Andrew Batutin on 8/27/13. 
+ */ +public interface PlayerThreadState { + + public void threadShouldStop(VideoPreviewThread thread); +} diff --git a/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/VideoPreviewThread.java b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/VideoPreviewThread.java new file mode 100644 index 000000000..286de4bad --- /dev/null +++ b/SDL_Core/mobile/android/AndroidVideoStreaming/src/main/java/com/batutin/android/androidvideostreaming/videopreview/VideoPreviewThread.java @@ -0,0 +1,25 @@ +package com.batutin.android.androidvideostreaming.videopreview; + +import com.batutin.android.androidvideostreaming.utils.ALog; + +/** + * Created by Andrew Batutin on 8/27/13. + */ +public abstract class VideoPreviewThread extends Thread { + + protected VideoPreviewThread() { + this.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { + @Override + public void uncaughtException(Thread thread, Throwable ex) { + defaultExceptionHandler(thread, ex); + } + }); + } + + protected abstract void defaultExceptionHandler(Thread paramThread, Throwable paramThrowable); + + protected void logUncaughtException(Thread paramThread, Throwable paramThrowable) { + String logMessage = String.format("Thread %d Message %s", paramThread.getId(), paramThrowable.getMessage()); + ALog.e(logMessage); + } +} |