summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--sdl_android/src/androidTest/java/com/smartdevicelink/api/SdlManagerTests.java2
-rw-r--r--sdl_android/src/androidTest/java/com/smartdevicelink/api/audio/AudioStreamManagerTest.java681
-rw-r--r--sdl_android/src/androidTest/res/raw/test_audio_square_250hz_80amp_1s.mp3bin0 -> 18803 bytes
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/SdlManager.java36
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/StreamingStateMachine.java2
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoder.java90
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderCompat.java130
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderListener.java27
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioStreamManager.java356
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/BaseAudioDecoder.java240
-rw-r--r--sdl_android/src/main/java/com/smartdevicelink/api/audio/SampleBuffer.java280
11 files changed, 1828 insertions, 16 deletions
diff --git a/sdl_android/src/androidTest/java/com/smartdevicelink/api/SdlManagerTests.java b/sdl_android/src/androidTest/java/com/smartdevicelink/api/SdlManagerTests.java
index cf0b6f3dd..af9d38b38 100644
--- a/sdl_android/src/androidTest/java/com/smartdevicelink/api/SdlManagerTests.java
+++ b/sdl_android/src/androidTest/java/com/smartdevicelink/api/SdlManagerTests.java
@@ -167,8 +167,8 @@ public class SdlManagerTests extends AndroidTestCase {
// Note : SdlManager.initialize() will not be called automatically by proxy as in real life because we have mock proxy not a real one
sdlManager.initialize();
sdlManager.setState(BaseSubManager.READY);
+ sdlManager.getAudioStreamManager().transitionToState(BaseSubManager.READY);
// manager.getVideoStreamingManager().transitionToState(BaseSubManager.READY);
- // manager.getAudioStreamManager().transitionToState(BaseSubManager.READY);
// manager.getLockScreenManager().transitionToState(BaseSubManager.READY);
// manager.getScreenManager().transitionToState(BaseSubManager.READY);
sdlManager.getPermissionManager().transitionToState(BaseSubManager.READY);
diff --git a/sdl_android/src/androidTest/java/com/smartdevicelink/api/audio/AudioStreamManagerTest.java b/sdl_android/src/androidTest/java/com/smartdevicelink/api/audio/AudioStreamManagerTest.java
new file mode 100644
index 000000000..b0ae9cbed
--- /dev/null
+++ b/sdl_android/src/androidTest/java/com/smartdevicelink/api/audio/AudioStreamManagerTest.java
@@ -0,0 +1,681 @@
+package com.smartdevicelink.api.audio;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.MediaFormat;
+import android.media.MediaPlayer;
+import android.os.Build;
+import android.os.Environment;
+import android.support.test.InstrumentationRegistry;
+import android.util.Log;
+
+import com.smartdevicelink.SdlConnection.SdlSession;
+import com.smartdevicelink.api.CompletionListener;
+import com.smartdevicelink.protocol.enums.SessionType;
+import com.smartdevicelink.proxy.interfaces.IAudioStreamListener;
+import com.smartdevicelink.proxy.interfaces.ISdl;
+import com.smartdevicelink.api.audio.AudioStreamManager.SampleType;
+import com.smartdevicelink.proxy.interfaces.ISdlServiceListener;
+import com.smartdevicelink.proxy.rpc.AudioPassThruCapabilities;
+import com.smartdevicelink.proxy.rpc.enums.AudioType;
+import com.smartdevicelink.proxy.rpc.enums.BitsPerSample;
+import com.smartdevicelink.proxy.rpc.enums.SamplingRate;
+import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType;
+
+import junit.framework.TestCase;
+
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doCallRealMethod;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.timeout;
+import static org.mockito.Mockito.verify;
+
+public class AudioStreamManagerTest extends TestCase {
+ public static final String TAG = AudioStreamManagerTest.class.getSimpleName();
+ private Context mContext;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ mContext = InstrumentationRegistry.getContext();
+ }
+
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ mContext = null;
+ }
+
+ public void testCreatingAudioStreamManager() {
+ ISdl internalInterface = mock(ISdl.class);
+ AudioPassThruCapabilities audioCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ doReturn(true).when(internalInterface).isConnected();
+ doReturn(audioCapabilities).when(internalInterface).getCapability(SystemCapabilityType.PCM_STREAMING);
+
+ new AudioStreamManager(internalInterface, mContext);
+ }
+
+ public void testStartAudioStreamManager() {
+ final SdlSession mockSession = mock(SdlSession.class);
+
+ Answer<Void> audioServiceAnswer = new Answer<Void>() {
+ ISdlServiceListener serviceListener = null;
+ @Override
+ public Void answer(InvocationOnMock invocation) {
+ Method method = invocation.getMethod();
+ Object[] args = invocation.getArguments();
+
+ switch (method.getName()) {
+ case "addServiceListener":
+ // parameters (SessionType serviceType, ISdlServiceListener sdlServiceListener);
+ SessionType sessionType = (SessionType) args[0];
+ assertEquals(sessionType, SessionType.PCM);
+ serviceListener = (ISdlServiceListener) args[1];
+ break;
+ case "startAudioService":
+ // parameters (boolean encrypted, AudioStreamingCodec codec, AudioStreamingParams params);
+ Boolean encrypted = (Boolean) args[0];
+ serviceListener.onServiceStarted(mockSession, SessionType.PCM, encrypted);
+ break;
+ case "stopAudioService":
+ // parameters ()
+ serviceListener.onServiceEnded(mockSession, SessionType.PCM);
+ break;
+ }
+
+ return null;
+ }
+ };
+
+ ISdl internalInterface = mock(ISdl.class);
+ AudioPassThruCapabilities audioCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ doReturn(true).when(internalInterface).isConnected();
+ doReturn(audioCapabilities).when(internalInterface).getCapability(SystemCapabilityType.PCM_STREAMING);
+ doAnswer(audioServiceAnswer).when(internalInterface).addServiceListener(any(SessionType.class), any(ISdlServiceListener.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).startAudioService(any(Boolean.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).stopAudioService();
+
+ CompletionListener completionListener = new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ assertEquals(true, success);
+ }
+ };
+
+ CompletionListener mockListener = spy(completionListener);
+ AudioStreamManager manager = new AudioStreamManager(internalInterface, mContext);
+
+ manager.startAudioStream(false, mockListener);
+ manager.stopAudioStream(mockListener);
+ verify(mockListener, timeout(10000).times(2)).onComplete(any(Boolean.class));
+ }
+
+ public void testWithSquareSampleAudio16BitAnd8KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._8KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(8000, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd16KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(16000, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd22KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._22KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(22050, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd44KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._44KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(44100, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd8KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._8KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(8000, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd16KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(16000, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd22KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._22KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(22050, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd44KhzApi16() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 16);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._44KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(44100, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd8KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._8KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(8000, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd16KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(16000, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd22KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._22KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(22050, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio16BitAnd44KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._44KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(44100, SampleType.SIGNED_16_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd8KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._8KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(8000, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd16KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(16000, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd22KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._22KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(22050, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ public void testWithSquareSampleAudio8BitAnd44KhzApi21() throws Exception {
+ setFinalStatic(Build.VERSION.class.getField("SDK_INT"), 21);
+ AudioPassThruCapabilities audioPassThruCapabilities = new AudioPassThruCapabilities(SamplingRate._44KHZ, BitsPerSample._8_BIT, AudioType.PCM);
+ runFullAudioManagerDecodeFlowWithSquareSampleAudio(44100, SampleType.UNSIGNED_8_BIT, audioPassThruCapabilities);
+ }
+
+ private int testFullAudioManagerDecodeFlowCorrectCounter = 0;
+ private int testFullAudioManagerDecodeFlowWrongCounter = 0;
+ private void runFullAudioManagerDecodeFlowWithSquareSampleAudio(final int sampleRate, final @SampleType int sampleType, final AudioPassThruCapabilities audioCapabilities) {
+ testFullAudioManagerDecodeFlowCorrectCounter = 0;
+ testFullAudioManagerDecodeFlowWrongCounter = 0;
+
+ IAudioStreamListener audioStreamListener = new IAudioStreamListener() {
+ @Override
+ public void sendAudio(byte[] data, int offset, int length, long presentationTimeUs) throws ArrayIndexOutOfBoundsException {
+ ByteBuffer buffer = ByteBuffer.wrap(data, offset, length);
+ this.sendAudio(buffer, presentationTimeUs);
+ }
+
+ @Override
+ public void sendAudio(ByteBuffer data, long presentationTimeUs) {
+ SampleBuffer samples = SampleBuffer.wrap(data, sampleType, presentationTimeUs);
+ double timeUs = presentationTimeUs;
+ double sampleDurationUs = 1000000.0 / sampleRate;
+
+ for (int i = 0; i < samples.limit(); ++i) {
+ double sample = samples.get(i);
+ double edge = timeUs % 4000.0;
+
+ if (edge > 2000.0) {
+                    // negate the sample, since a negative value is expected in this half of the wave
+ sample = sample * -1.0;
+ }
+
+ edge = edge % 2000.0;
+
+                    // near a wave edge (transition) the sample amplitude may fall below 0.7
+ if ((sample > 0.7 && sample < 0.95) || (edge < sampleDurationUs || (2000.0 - sampleDurationUs) < edge)) {
+ testFullAudioManagerDecodeFlowCorrectCounter++;
+ } else {
+ testFullAudioManagerDecodeFlowWrongCounter++;
+ }
+
+ timeUs += sampleDurationUs;
+ }
+ }
+ };
+
+ final SdlSession mockSession = mock(SdlSession.class);
+ doReturn(audioStreamListener).when(mockSession).startAudioStream();
+
+ Answer<Void> audioServiceAnswer = new Answer<Void>() {
+ ISdlServiceListener serviceListener = null;
+ @Override
+ public Void answer(InvocationOnMock invocation) {
+ Method method = invocation.getMethod();
+ Object[] args = invocation.getArguments();
+
+ switch (method.getName()) {
+ case "addServiceListener":
+ // (SessionType serviceType, ISdlServiceListener sdlServiceListener);
+ SessionType sessionType = (SessionType) args[0];
+ assertEquals(sessionType, SessionType.PCM);
+
+ serviceListener = (ISdlServiceListener) args[1];
+ break;
+ case "startAudioService":
+ //(boolean encrypted, AudioStreamingCodec codec, AudioStreamingParams params);
+ Boolean encrypted = (Boolean) args[0];
+ serviceListener.onServiceStarted(mockSession, SessionType.PCM, encrypted);
+ break;
+ case "stopAudioService":
+ // parameters ()
+ serviceListener.onServiceEnded(mockSession, SessionType.PCM);
+ break;
+ }
+
+ return null;
+ }
+ };
+
+ ISdl internalInterface = mock(ISdl.class);
+ doReturn(true).when(internalInterface).isConnected();
+ doReturn(audioCapabilities).when(internalInterface).getCapability(any(SystemCapabilityType.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).addServiceListener(any(SessionType.class), any(ISdlServiceListener.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).startAudioService(any(Boolean.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).stopAudioService();
+
+ CompletionListener fileCompletionListener = new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ assertEquals(true, success);
+
+                // no more than 2.5 percent of the samples may be wrong
+ double relation = 100.0 * (double)testFullAudioManagerDecodeFlowWrongCounter / (double)testFullAudioManagerDecodeFlowCorrectCounter;
+ Log.v(TAG, "Validating number of correct samples (" + Math.round(relation) + "%)");
+ if (relation > 2.5) {
+ fail("Validating raw audio failed. " + Math.round(relation) + " % wrong samples detected. Correct: " + testFullAudioManagerDecodeFlowCorrectCounter + ", Wrong: " + testFullAudioManagerDecodeFlowWrongCounter);
+ }
+ }
+ };
+
+ final CompletionListener mockFileListener = spy(fileCompletionListener);
+
+ final AudioStreamManager manager = new AudioStreamManager(internalInterface, mContext);
+ manager.startAudioStream(false, new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ assertEquals(true, success);
+
+ manager.pushResource(com.smartdevicelink.test.R.raw.test_audio_square_250hz_80amp_1s, mockFileListener);
+ }
+ });
+
+ verify(mockFileListener, timeout(10000)).onComplete(any(Boolean.class));
+ }
+
+ public void testSampleAtTargetTimeReturnNull() {
+ BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);
+ Method sampleAtTargetMethod = getSampleAtTargetMethod();
+ SampleBuffer sample = SampleBuffer.allocate(1, SampleType.SIGNED_16_BIT, ByteOrder.LITTLE_ENDIAN, 1);
+ Double result;
+ try {
+ result = (Double) sampleAtTargetMethod.invoke(mockDecoder, 1.0, sample, 1, 3, 2);
+ assertNull(result);
+ result = (Double) sampleAtTargetMethod.invoke(mockDecoder, 1.0, sample, 5, 3, 1);
+ assertNull(result);
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail();
+ }
+ }
+
+ public void testSampleAtTargetTimeReturnLastOutputSample() {
+ BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);
+ Method sampleAtTargetMethod = getSampleAtTargetMethod();
+ SampleBuffer sample = SampleBuffer.allocate(1, SampleType.SIGNED_16_BIT, ByteOrder.LITTLE_ENDIAN, 1);
+ Double result;
+ Double lastOutputSample = 15.0;
+ try {
+ result = (Double) sampleAtTargetMethod.invoke(mockDecoder, lastOutputSample, sample, 6, 1, 5);
+ assertTrue(result.doubleValue() == lastOutputSample);
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail();
+ }
+ }
+
+ public void testSampleAtTargetTimeReturnOutputSampleGet() {
+ BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);
+ Method sampleAtTargetMethod = getSampleAtTargetMethod();
+ SampleBuffer sample = SampleBuffer.allocate(10, SampleType.SIGNED_16_BIT, ByteOrder.LITTLE_ENDIAN, 1);
+ Double result;
+ try {
+ result = (Double) sampleAtTargetMethod.invoke(mockDecoder, 1.0, sample, 1, 1, 2);
+ assertTrue(result == sample.get(1));
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail();
+ }
+ }
+
+ public void testSampleAtTargetTime() {
+ BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);
+ Method sampleAtTargetMethod = getSampleAtTargetMethod();
+ SampleBuffer sample = SampleBuffer.allocate(10, SampleType.SIGNED_16_BIT, ByteOrder.LITTLE_ENDIAN, 1);
+ Double result;
+ try {
+ result = (Double) sampleAtTargetMethod.invoke(mockDecoder, 1.0, sample, 1, 3, 2);
+ assertNotNull(result);
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail();
+ }
+ }
+
+ public void testOutputFormatChanged() {
+ BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);
+
+ try {
+ Field outputChannelCountField = BaseAudioDecoder.class.getDeclaredField("outputChannelCount");
+ Field outputSampleRateField = BaseAudioDecoder.class.getDeclaredField("outputSampleRate");
+ Field outputSampleTypeField = BaseAudioDecoder.class.getDeclaredField("outputSampleType");
+
+ outputChannelCountField.setAccessible(true);
+ outputSampleRateField.setAccessible(true);
+ outputSampleTypeField.setAccessible(true);
+
+ // channel count, sample rate, sample type
+ int key_channel_count = 0, key_sample_rate = 1, key_sample_type = 2, key_sample_type_result = 3;
+ int[][] tests = new int[][] {
+ { 47, 42000, AudioFormat.ENCODING_PCM_8BIT, SampleType.UNSIGNED_8_BIT },
+ { 2, 16000, AudioFormat.ENCODING_PCM_16BIT, SampleType.SIGNED_16_BIT },
+ { 1, 22050, AudioFormat.ENCODING_PCM_FLOAT, SampleType.FLOAT },
+ { 3, 48000, AudioFormat.ENCODING_INVALID, SampleType.SIGNED_16_BIT },
+ };
+
+ for (int[] test : tests) {
+ int channel_count = test[key_channel_count];
+ int sample_rate = test[key_sample_rate];
+ int sample_type = test[key_sample_type];
+ int sample_type_result = test[key_sample_type_result];
+
+ MediaFormat format = new MediaFormat();
+
+ format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, channel_count);
+ format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sample_rate);
+ format.setInteger(MediaFormat.KEY_PCM_ENCODING, sample_type);
+
+                // on older Android versions the method does not take the sample type into account but
+                // always expects 16 bit. See https://developer.android.com/reference/android/media/MediaFormat.html#KEY_PCM_ENCODING
+ if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.N) {
+ sample_type_result = SampleType.SIGNED_16_BIT;
+ }
+
+ mockDecoder.onOutputFormatChanged(format);
+
+ int output_channel_count = outputChannelCountField.getInt(mockDecoder);
+ int output_sample_rate = outputSampleRateField.getInt(mockDecoder);
+ int output_sample_type = outputSampleTypeField.getInt(mockDecoder);
+
+ // changing from assertEquals to if and fail so travis gives better results
+
+ if (channel_count != output_channel_count) {
+ fail("AssertEqualsFailed: channel_count == output_channel_count (" + channel_count + " == " + output_channel_count + ")");
+ }
+
+ if (sample_rate != output_sample_rate) {
+ fail("AssertEqualsFailed: sample_rate == output_sample_rate (" + sample_rate + " == " + output_sample_rate + ")");
+ }
+
+ if (sample_type_result != output_sample_type) {
+ fail("Assert: sample_type_result == output_sample_type (" + sample_type_result + " == " + output_sample_type + ")");
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail();
+ }
+ }
+
+ public void testPlayAudioFileForManualTest() throws IOException {
+ AudioPassThruCapabilities audioCapabilities = new AudioPassThruCapabilities(SamplingRate._16KHZ, BitsPerSample._16_BIT, AudioType.PCM);
+ final int sampleType = SampleType.SIGNED_16_BIT;
+ final int sampleRate = 16000;
+
+ final File outputFile = new File(mContext.getCacheDir(), "test_audio_file.wav");
+ final FileOutputStream fileOutputStream = new FileOutputStream(outputFile);
+ writeWaveHeader(fileOutputStream, sampleRate, sampleType << 3);
+
+ IAudioStreamListener audioStreamListener = new IAudioStreamListener() {
+ long audioLength = 0;
+
+ @Override
+ public void sendAudio(byte[] data, int offset, int length, long presentationTimeUs) throws ArrayIndexOutOfBoundsException {
+ ByteBuffer buffer = ByteBuffer.wrap(data, offset, length);
+ this.sendAudio(buffer, presentationTimeUs);
+ }
+
+ @Override
+ public void sendAudio(ByteBuffer data, long presentationTimeUs) {
+ try {
+ long length = data.limit();
+ byte[] d = data.array();
+ fileOutputStream.write(d, 0, (int) length);
+
+ audioLength += length;
+ RandomAccessFile raf = new RandomAccessFile(outputFile, "rw");
+ updateWaveHeaderLength(raf, audioLength);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ };
+
+ final SdlSession mockSession = mock(SdlSession.class);
+ doReturn(audioStreamListener).when(mockSession).startAudioStream();
+
+ Answer<Void> audioServiceAnswer = new Answer<Void>() {
+ ISdlServiceListener serviceListener = null;
+ @Override
+ public Void answer(InvocationOnMock invocation) {
+ Method method = invocation.getMethod();
+ Object[] args = invocation.getArguments();
+
+ switch (method.getName()) {
+ case "addServiceListener":
+ // (SessionType serviceType, ISdlServiceListener sdlServiceListener);
+ SessionType sessionType = (SessionType) args[0];
+ assertEquals(sessionType, SessionType.PCM);
+
+ serviceListener = (ISdlServiceListener) args[1];
+ break;
+ case "startAudioService":
+ //(boolean encrypted, AudioStreamingCodec codec, AudioStreamingParams params);
+ Boolean encrypted = (Boolean) args[0];
+ serviceListener.onServiceStarted(mockSession, SessionType.PCM, encrypted);
+ break;
+ case "stopAudioService":
+ // parameters ()
+ serviceListener.onServiceEnded(mockSession, SessionType.PCM);
+ break;
+ }
+
+ return null;
+ }
+ };
+
+ ISdl internalInterface = mock(ISdl.class);
+ doReturn(true).when(internalInterface).isConnected();
+ doReturn(audioCapabilities).when(internalInterface).getCapability(any(SystemCapabilityType.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).addServiceListener(any(SessionType.class), any(ISdlServiceListener.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).startAudioService(any(Boolean.class));
+ doAnswer(audioServiceAnswer).when(internalInterface).stopAudioService();
+
+ final MediaPlayer.OnCompletionListener mockPlayerCompletionListener = mock(MediaPlayer.OnCompletionListener.class);
+ final MediaPlayer player = new MediaPlayer();
+ player.setOnCompletionListener(mockPlayerCompletionListener);
+
+ CompletionListener fileCompletionListener = new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ try {
+ fileOutputStream.flush();
+ fileOutputStream.close();
+
+ player.setDataSource(outputFile.getPath());
+ player.prepare();
+ player.start();
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ };
+
+ final CompletionListener mockFileListener = spy(fileCompletionListener);
+
+ final AudioStreamManager manager = new AudioStreamManager(internalInterface, mContext);
+ manager.startAudioStream(false, new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ assertEquals(true, success);
+
+ manager.pushResource(com.smartdevicelink.test.R.raw.test_audio_square_250hz_80amp_1s, mockFileListener);
+ }
+ });
+
+ verify(mockFileListener, timeout(10000)).onComplete(any(Boolean.class));
+ verify(mockPlayerCompletionListener, timeout(10000)).onCompletion(any(MediaPlayer.class));
+ }
+
+ private Method getSampleAtTargetMethod() {
+ Method method = null;
+ try {
+ method = BaseAudioDecoder.class.getDeclaredMethod("sampleAtTargetTime",
+ double.class, SampleBuffer.class, double.class, double.class, double.class);
+ method.setAccessible(true);
+ } catch (NoSuchMethodException e) {
+ e.printStackTrace();
+ fail();
+ }
+ return method;
+ }
+
+ private void setFinalStatic(Field field, Object newValue) throws Exception {
+ field.setAccessible(true);
+ field.set(null, newValue);
+ }
+
+ private void writeWaveHeader(OutputStream stream, long samplerate, long bitspersample) throws IOException {
+ byte[] header = new byte[44];
+ // the data header is 36 bytes large
+ long datalength = 36;
+ long audiolength = 0;
+ long format = 1; // 1 = PCM
+ long channels = 1;
+ long blockalign = (channels * bitspersample) >> 3;
+ long byterate = (samplerate * channels * bitspersample) >> 3;
+
+ // RIFF header.
+ header[0] = 'R';
+ header[1] = 'I';
+ header[2] = 'F';
+ header[3] = 'F';
+ // Total data length (UInt32).
+ header[4] = (byte)((datalength) & 0xff);
+ header[5] = (byte)((datalength >> 8) & 0xff);
+ header[6] = (byte)((datalength >> 16) & 0xff);
+ header[7] = (byte)((datalength >> 24) & 0xff);
+ // WAVE header.
+ header[8] = 'W';
+ header[9] = 'A';
+ header[10] = 'V';
+ header[11] = 'E';
+ // Format (fmt) header.
+ header[12] = 'f';
+ header[13] = 'm';
+ header[14] = 't';
+ header[15] = ' ';
+ // Format header size (UInt32).
+ header[16] = 16;
+ header[17] = 0;
+ header[18] = 0;
+ header[19] = 0;
+ // Format type (UInt16). Set 1 for PCM.
+ header[20] = (byte)((format) & 0xff);
+ header[21] = (byte)((format >> 8) & 0xff);
+ // Channels
+ header[22] = (byte)((channels) & 0xff);
+ header[23] = (byte)((channels >> 8) & 0xff);
+ // Sample rate (UInt32).
+ header[24] = (byte)((samplerate) & 0xff);
+ header[25] = (byte)((samplerate >> 8) & 0xff);
+ header[26] = (byte)((samplerate >> 16) & 0xff);
+ header[27] = (byte)((samplerate >> 24) & 0xff);
+ // Byte rate (UInt32).
+ header[28] = (byte)((byterate) & 0xff);
+ header[29] = (byte)((byterate >> 8) & 0xff);
+ header[30] = (byte)((byterate >> 16) & 0xff);
+ header[31] = (byte)((byterate >> 24) & 0xff);
+ // Block alignment (UInt16).
+ header[32] = (byte)((blockalign) & 0xff);
+ header[33] = (byte)((blockalign >> 8) & 0xff);
+ // Bits per sample (UInt16).
+ header[34] = (byte)((bitspersample) & 0xff);
+ header[35] = (byte)((bitspersample >> 8) & 0xff);
+ // Data header
+ header[36] = 'd';
+ header[37] = 'a';
+ header[38] = 't';
+ header[39] = 'a';
+ // Total audio length (UInt32).
+ header[40] = (byte)((audiolength) & 0xff);
+ header[41] = (byte)((audiolength >> 8) & 0xff);
+ header[42] = (byte)((audiolength >> 16) & 0xff);
+ header[43] = (byte)((audiolength >> 24) & 0xff);
+
+ stream.write(header, 0, header.length);
+ }
+
+ /** Updates the data length and audio length of an existing RIFF/WAVE header in the file pointed by the RandomAccessFile object. */
+ private void updateWaveHeaderLength(RandomAccessFile stream, long audiolength) throws IOException {
+ // the data header is 36 bytes large
+ long datalength = 36 + audiolength;
+
+ // Seek from the beginning to data length
+ stream.seek(4);
+ // Overwrite total data length
+ stream.write((int)((datalength) & 0xff));
+ stream.write((int)((datalength >> 8) & 0xff));
+ stream.write((int)((datalength >> 16) & 0xff));
+ stream.write((int)((datalength >> 24) & 0xff));
+ // Seek from the end of data length to audio length
+ stream.seek(40);
+ // overwrite total audio length
+ stream.write((int)((audiolength) & 0xff));
+ stream.write((int)((audiolength >> 8) & 0xff));
+ stream.write((int)((audiolength >> 16) & 0xff));
+ stream.write((int)((audiolength >> 24) & 0xff));
+ }
+}
diff --git a/sdl_android/src/androidTest/res/raw/test_audio_square_250hz_80amp_1s.mp3 b/sdl_android/src/androidTest/res/raw/test_audio_square_250hz_80amp_1s.mp3
new file mode 100644
index 000000000..6108d27ec
--- /dev/null
+++ b/sdl_android/src/androidTest/res/raw/test_audio_square_250hz_80amp_1s.mp3
Binary files differ
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/SdlManager.java b/sdl_android/src/main/java/com/smartdevicelink/api/SdlManager.java
index 79b666fd9..3a332e8dd 100644
--- a/sdl_android/src/main/java/com/smartdevicelink/api/SdlManager.java
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/SdlManager.java
@@ -1,10 +1,12 @@
package com.smartdevicelink.api;
import android.content.Context;
+import android.os.Build;
import android.support.annotation.NonNull;
import android.util.Log;
import com.smartdevicelink.api.PermissionManager.PermissionManager;
+import com.smartdevicelink.api.audio.AudioStreamManager;
import com.smartdevicelink.api.datatypes.SdlArtwork;
import com.smartdevicelink.api.lockscreen.LockScreenConfig;
import com.smartdevicelink.api.lockscreen.LockScreenManager;
@@ -79,9 +81,10 @@ public class SdlManager{
private PermissionManager permissionManager;
private VideoStreamingManager videoStreamingManager;
private FileManager fileManager;
+ private AudioStreamManager audioStreamManager;
private LockScreenManager lockScreenManager;
- /*
- private AudioStreamManager audioStreamManager;
+
+ /*
private ScreenManager screenManager;
*/
@@ -124,10 +127,10 @@ public class SdlManager{
if(
permissionManager != null && permissionManager.getState() != BaseSubManager.SETTING_UP &&
fileManager != null && fileManager.getState() != BaseSubManager.SETTING_UP &&
+ audioStreamManager != null && audioStreamManager.getState() != BaseSubManager.SETTING_UP &&
(videoStreamingManager == null || (videoStreamingManager != null && videoStreamingManager.getState() != BaseSubManager.SETTING_UP)) &&
lockScreenManager != null && lockScreenManager.getState() != BaseSubManager.SETTING_UP
/*
- audioStreamManager != null && audioStreamManager.getState() != BaseSubManager.SETTING_UP &&
screenManager != null && screenManager.getState() != BaseSubManager.SETTING_UP
*/
){
@@ -170,6 +173,12 @@ public class SdlManager{
this.fileManager = new FileManager(_internalInterface, context);
this.fileManager.start(subManagerListener);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
+ this.audioStreamManager = new AudioStreamManager(_internalInterface, context);
+ this.audioStreamManager.start(subManagerListener);
+ } else {
+ this.audioStreamManager = null;
+ }
if (lockScreenConfig.isEnabled()) {
this.lockScreenManager = new LockScreenManager(lockScreenConfig, context, _internalInterface);
@@ -179,8 +188,6 @@ public class SdlManager{
/*
this.screenManager = new ScreenManager(_internalInterface, this.fileManager);
this.screenManager.start(subManagerListener);
- this.audioStreamManager = new AudioStreamManager(_internalInterface);
- this.audioStreamManager.start(subManagerListener);
*/
}
@@ -194,11 +201,14 @@ public class SdlManager{
if (this.lockScreenManager != null) {
this.lockScreenManager.dispose();
}
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
+ if (this.audioStreamManager != null) {
+ this.audioStreamManager.dispose();
+ }
+ }
/*
- this.audioStreamManager.dispose();
this.screenManager.dispose();
- this.audioStreamManager.dispose();
*/
if(managerListener != null){
managerListener.onDestroy();
@@ -446,17 +456,15 @@ public class SdlManager{
}
- /**
- * Gets the AudioStreamManager. <br>
- * <strong>Note: AudioStreamManager should be used only after SdlManager.start() CompletionListener callback is completed successfully.</strong>
- * @return a AudioStreamManager object
- */
- /*
+ /**
+ * Gets the AudioStreamManager. <br>
+ * <strong>Note: AudioStreamManager should be used only after SdlManager.start() CompletionListener callback is completed successfully.</strong>
+ * @return an AudioStreamManager object
+ */
public AudioStreamManager getAudioStreamManager() {
checkSdlManagerState();
return audioStreamManager;
}
- */
/**
* Gets the ScreenManager. <br>
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/StreamingStateMachine.java b/sdl_android/src/main/java/com/smartdevicelink/api/StreamingStateMachine.java
index dd77031b9..04c99e93c 100644
--- a/sdl_android/src/main/java/com/smartdevicelink/api/StreamingStateMachine.java
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/StreamingStateMachine.java
@@ -69,4 +69,4 @@ public class StreamingStateMachine {
}
return false;
}
-}
+} \ No newline at end of file
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoder.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoder.java
new file mode 100644
index 000000000..78e2907e1
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoder.java
@@ -0,0 +1,90 @@
+package com.smartdevicelink.api.audio;
+
+import android.content.Context;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.net.Uri;
+import android.os.Build;
+import android.support.annotation.NonNull;
+import android.support.annotation.RequiresApi;
+import android.util.Log;
+import com.smartdevicelink.api.audio.AudioStreamManager.SampleType;
+
+import java.nio.ByteBuffer;
+
+/**
+ * The audio decoder to decode a single audio file to PCM.
+ */
+@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+public class AudioDecoder extends BaseAudioDecoder {
+ private static final String TAG = AudioDecoder.class.getSimpleName();
+
+ /**
+ * Creates a new object of AudioDecoder.
+ * @param audioSource The audio source to decode.
+ * @param context The context object to use to open the audio source.
+ * @param sampleRate The desired sample rate for decoded audio data.
+ * @param sampleType The desired sample type (8bit, 16bit, float).
+ * @param listener A listener who receives the decoded audio.
+ */
+ AudioDecoder(Uri audioSource, Context context, int sampleRate, @SampleType int sampleType, AudioDecoderListener listener) {
+ super(audioSource, context, sampleRate, sampleType, listener);
+ }
+
+ /**
+ * Starts the audio decoding asynchronously.
+ */
+ public void start() {
+ try {
+ initMediaComponents();
+
+ decoder.setCallback(new MediaCodec.Callback() {
+ @Override
+ public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int i) {
+ ByteBuffer inputBuffer = mediaCodec.getInputBuffer(i);
+ if (inputBuffer == null) return;
+
+ MediaCodec.BufferInfo info = AudioDecoder.super.onInputBufferAvailable(extractor, inputBuffer);
+ mediaCodec.queueInputBuffer(i, info.offset, info.size, info.presentationTimeUs, info.flags);
+ }
+
+ @Override
+ public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int i, @NonNull MediaCodec.BufferInfo bufferInfo) {
+ ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(i);
+ if (outputBuffer == null) return;
+
+ if (outputBuffer.limit() > 0) {
+ SampleBuffer targetSampleBuffer = AudioDecoder.super.onOutputBufferAvailable(outputBuffer);
+ AudioDecoder.this.listener.onAudioDataAvailable(targetSampleBuffer);
+ } else {
+ Log.w(TAG, "output buffer empty. Chance that silence was detected");
+ }
+
+ mediaCodec.releaseOutputBuffer(i, false);
+
+ if (bufferInfo.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
+ listener.onDecoderFinish(true);
+ stop();
+ }
+ }
+
+ @Override
+ public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {
+ AudioDecoder.super.onOutputFormatChanged(mediaFormat);
+ }
+
+ @Override
+ public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+ AudioDecoder.super.onMediaCodecError(e);
+ }
+ });
+
+ decoder.start();
+ } catch (Exception e) {
+ e.printStackTrace();
+ listener.onDecoderError(e);
+ listener.onDecoderFinish(false);
+ stop();
+ }
+ }
+} \ No newline at end of file
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderCompat.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderCompat.java
new file mode 100644
index 000000000..e22de5468
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderCompat.java
@@ -0,0 +1,130 @@
+package com.smartdevicelink.api.audio;
+
+import android.content.Context;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.net.Uri;
+import android.os.AsyncTask;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.annotation.NonNull;
+import android.support.annotation.RequiresApi;
+import android.util.Log;
+
+import com.smartdevicelink.api.audio.AudioStreamManager.SampleType;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+
+/**
+ * The audio decoder to decode a single audio file to PCM.
+ * This decoder supports phones with api < 21 but uses methods deprecated with api 21.
+ */
+@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
+public class AudioDecoderCompat extends BaseAudioDecoder {
+ private static final String TAG = AudioDecoderCompat.class.getSimpleName();
+ private static final int DEQUEUE_TIMEOUT = 3000;
+ private static Runnable sRunnable;
+ private Thread mThread;
+
+ /**
+ * Creates a new object of AudioDecoder.
+ * @param audioSource The audio source to decode.
+ * @param context The context object to use to open the audio source.
+ * @param sampleRate The desired sample rate for decoded audio data.
+ * @param sampleType The desired sample type (8bit, 16bit, float).
+ * @param listener A listener who receives the decoded audio.
+ */
+ AudioDecoderCompat(@NonNull Uri audioSource, @NonNull Context context, int sampleRate, @SampleType int sampleType, AudioDecoderListener listener) {
+ super(audioSource, context, sampleRate, sampleType, listener);
+ }
+
+ /**
+ * Starts the audio decoding asynchronously.
+ */
+ public void start() {
+ try {
+ initMediaComponents();
+ decoder.start();
+ mThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ decodeAudio(AudioDecoderCompat.this);
+ }
+ });
+ mThread.start();
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ this.listener.onDecoderError(e);
+ this.listener.onDecoderFinish(false);
+ stop();
+ }
+ }
+
+ /**
+ * Decodes all audio data from source
+ * @param ref instance of this class
+ */
+ private void decodeAudio(AudioDecoderCompat ref) {
+ WeakReference<AudioDecoderCompat> weakReference = new WeakReference<>(ref);
+ final AudioDecoderCompat reference = weakReference.get();
+ if (reference == null) {
+ listener.onDecoderFinish(false);
+ Log.w(TAG, "AudioDecoderCompat reference was null");
+ return;
+ }
+ ByteBuffer[] inputBuffersArray = reference.decoder.getInputBuffers();
+ ByteBuffer[] outputBuffersArray = reference.decoder.getOutputBuffers();
+ MediaCodec.BufferInfo outputBufferInfo = new MediaCodec.BufferInfo();
+ MediaCodec.BufferInfo inputBufferInfo;
+ ByteBuffer inputBuffer, outputBuffer;
+ SampleBuffer sampleBuffer;
+
+ while (true) {
+ int inputBuffersArrayIndex = 0;
+ while (inputBuffersArrayIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
+ inputBuffersArrayIndex = reference.decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+ if (inputBuffersArrayIndex >= 0) {
+ inputBuffer = inputBuffersArray[inputBuffersArrayIndex];
+ inputBufferInfo = onInputBufferAvailable(extractor, inputBuffer);
+ reference.decoder.queueInputBuffer(inputBuffersArrayIndex, inputBufferInfo.offset, inputBufferInfo.size, inputBufferInfo.presentationTimeUs, inputBufferInfo.flags);
+ }
+ }
+
+ int outputBuffersArrayIndex = 0;
+ while (outputBuffersArrayIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
+ outputBuffersArrayIndex = reference.decoder.dequeueOutputBuffer(outputBufferInfo, DEQUEUE_TIMEOUT);
+ if (outputBuffersArrayIndex >= 0) {
+ outputBuffer = outputBuffersArray[outputBuffersArrayIndex];
+ if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outputBufferInfo.size != 0) {
+ reference.decoder.releaseOutputBuffer(outputBuffersArrayIndex, false);
+ } else if (outputBuffer.limit() > 0) {
+ sampleBuffer = onOutputBufferAvailable(outputBuffer);
+ listener.onAudioDataAvailable(sampleBuffer);
+ reference.decoder.releaseOutputBuffer(outputBuffersArrayIndex, false);
+ }
+ } else if (outputBuffersArrayIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ MediaFormat newFormat = reference.decoder.getOutputFormat();
+ onOutputFormatChanged(newFormat);
+ }
+ }
+
+ if (outputBufferInfo.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
+ if (listener != null) {
+ listener.onDecoderFinish(true);
+ }
+ stop();
+ try {
+ mThread.interrupt();
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ mThread = null;
+ break;
+ }
+ }
+ }
+ }
+}
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderListener.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderListener.java
new file mode 100644
index 000000000..1ed30ebf2
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioDecoderListener.java
@@ -0,0 +1,27 @@
+package com.smartdevicelink.api.audio;
+
+/**
+ * An interface for the audio decoder classes.
+ * The caller using the audio decoder will be
+ * notified when the decoding is finished or if an error occurred.
+ * During decoding the caller receives sample buffers with decoded audio data.
+ */
+public interface AudioDecoderListener {
+ /**
+ * Notifies that decoded audio data is available.
+ * @param sampleBuffer The sample buffer holding the decoded audio data.
+ */
+ void onAudioDataAvailable(SampleBuffer sampleBuffer);
+
+ /**
+ * Notifies that the audio decoding is finished.
+ * @param success Indicates whether audio decoding was successful or if an error occurred.
+ */
+ void onDecoderFinish(boolean success);
+
+ /**
+ * Notifies the caller that an error/exception occurred during audio decoding.
+ * @param e The exception storing information about the error.
+ */
+ void onDecoderError(Exception e);
+}
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioStreamManager.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioStreamManager.java
new file mode 100644
index 000000000..5410e42bf
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/AudioStreamManager.java
@@ -0,0 +1,356 @@
+package com.smartdevicelink.api.audio;
+
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.res.Resources;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.annotation.IntDef;
+import android.support.annotation.NonNull;
+import android.support.annotation.RequiresApi;
+import android.util.Log;
+
+import com.smartdevicelink.SdlConnection.SdlSession;
+import com.smartdevicelink.api.BaseSubManager;
+import com.smartdevicelink.api.CompletionListener;
+import com.smartdevicelink.api.StreamingStateMachine;
+import com.smartdevicelink.protocol.enums.SessionType;
+import com.smartdevicelink.proxy.interfaces.IAudioStreamListener;
+import com.smartdevicelink.proxy.interfaces.ISdl;
+import com.smartdevicelink.proxy.interfaces.ISdlServiceListener;
+import com.smartdevicelink.proxy.rpc.AudioPassThruCapabilities;
+import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.util.LinkedList;
+import java.util.Queue;
+
+/**
+ * The AudioStreamManager class provides methods to start and stop an audio stream
+ * to the connected device. Audio files can be pushed to the manager in order to
+ * play them on the connected device. The manager uses the Android built-in MediaCodec.
+ */
+@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
+public class AudioStreamManager extends BaseSubManager {
+ private static final String TAG = AudioStreamManager.class.getSimpleName();
+ private static final int COMPLETION_TIMEOUT = 2000;
+
+ private IAudioStreamListener sdlAudioStream;
+ private int sdlSampleRate;
+ private @SampleType int sdlSampleType;
+ private final Queue<BaseAudioDecoder> queue;
+ private final WeakReference<Context> context;
+ private final StreamingStateMachine streamingStateMachine;
+
+ // This completion listener is used as a callback to the app developer when starting/stopping audio service
+ private CompletionListener serviceCompletionListener;
+ // As the internal interface does not provide a timeout, we schedule one ourselves via a handler-posted runnable
+ private final Handler serviceCompletionHandler;
+
+ private final Runnable serviceCompletionTimeoutCallback = new Runnable() {
+ @Override
+ public void run() {
+ serviceListener.onServiceError(null, SessionType.PCM, "Service operation timeout reached");
+ }
+ };
+
+
+
+ // INTERNAL INTERFACE
+
+ private final ISdlServiceListener serviceListener = new ISdlServiceListener() {
+ @Override
+ public void onServiceStarted(SdlSession session, SessionType type, boolean isEncrypted) {
+ if (SessionType.PCM.equals(type)) {
+ serviceCompletionHandler.removeCallbacks(serviceCompletionTimeoutCallback);
+
+ sdlAudioStream = session.startAudioStream();
+ streamingStateMachine.transitionToState(StreamingStateMachine.STARTED);
+
+ if (serviceCompletionListener != null) {
+ CompletionListener completionListener = serviceCompletionListener;
+ serviceCompletionListener = null;
+ completionListener.onComplete(true);
+ }
+ }
+ }
+
+ @Override
+ public void onServiceEnded(SdlSession session, SessionType type) {
+ if (SessionType.PCM.equals(type)) {
+ serviceCompletionHandler.removeCallbacks(serviceCompletionTimeoutCallback);
+
+ session.stopAudioStream();
+ sdlAudioStream = null;
+ streamingStateMachine.transitionToState(StreamingStateMachine.NONE);
+
+ if (serviceCompletionListener != null) {
+ CompletionListener completionListener = serviceCompletionListener;
+ serviceCompletionListener = null;
+ completionListener.onComplete(true);
+ }
+ }
+ }
+
+ @Override
+ public void onServiceError(SdlSession session, SessionType type, String reason) {
+ if (SessionType.PCM.equals(type)) {
+ serviceCompletionHandler.removeCallbacks(serviceCompletionTimeoutCallback);
+
+ streamingStateMachine.transitionToState(StreamingStateMachine.ERROR);
+ Log.e(TAG, "OnServiceError: " + reason);
+ streamingStateMachine.transitionToState(StreamingStateMachine.NONE);
+
+ if (serviceCompletionListener != null) {
+ CompletionListener completionListener = serviceCompletionListener;
+ serviceCompletionListener = null;
+ completionListener.onComplete(false);
+ }
+ }
+ }
+ };
+
+ /**
+ * Creates a new object of AudioStreamManager
+ * @param internalInterface The internal interface to the connected device.
+ */
+ public AudioStreamManager(@NonNull ISdl internalInterface, @NonNull Context context) {
+ super(internalInterface);
+ this.queue = new LinkedList<>();
+ this.context = new WeakReference<>(context);
+ this.serviceCompletionHandler = new Handler(Looper.getMainLooper());
+
+ internalInterface.addServiceListener(SessionType.PCM, serviceListener);
+
+ streamingStateMachine = new StreamingStateMachine();
+ transitionToState(BaseSubManager.READY);
+ }
+
+ @Override
+ public void dispose() {
+ stopAudioStream(new CompletionListener() {
+ @Override
+ public void onComplete(boolean success) {
+ internalInterface.removeServiceListener(SessionType.PCM, serviceListener);
+ }
+ });
+
+ super.dispose();
+ }
+
+ /**
+ * Starts the audio service and audio stream to the connected device.
+ * The method is non-blocking.
+ * @param encrypted Specify whether or not the audio stream should be encrypted.
+ */
+ public void startAudioStream(boolean encrypted, final CompletionListener completionListener) {
+ // audio stream cannot be started without a connected internal interface
+ if (!internalInterface.isConnected()) {
+ Log.w(TAG, "startAudioStream called without being connected.");
+ finish(completionListener, false);
+ return;
+ }
+
+ // streaming state must be NONE (starting the service is ready. starting stream is started)
+ if (streamingStateMachine.getState() != StreamingStateMachine.NONE) {
+ Log.w(TAG, "startAudioStream called but streamingStateMachine is not in state NONE (current: " + streamingStateMachine.getState() + ")");
+ finish(completionListener, false);
+ return;
+ }
+
+ AudioPassThruCapabilities capabilities = (AudioPassThruCapabilities) internalInterface.getCapability(SystemCapabilityType.PCM_STREAMING);
+
+ if (capabilities != null) {
+ switch (capabilities.getSamplingRate()) {
+ case _8KHZ:
+ sdlSampleRate = 8000;
+ break;
+ case _16KHZ:
+ sdlSampleRate = 16000;
+ break;
+ case _22KHZ:
+ // common sample rate is 22050, not 22000
+ // see https://en.wikipedia.org/wiki/Sampling_(signal_processing)#Audio_sampling
+ sdlSampleRate = 22050;
+ break;
+ case _44KHZ:
+ // 2x 22050 is 44100
+ // see https://en.wikipedia.org/wiki/Sampling_(signal_processing)#Audio_sampling
+ sdlSampleRate = 44100;
+ break;
+ default:
+ finish(completionListener, false);
+ return;
+ }
+
+ switch (capabilities.getBitsPerSample()) {
+ case _8_BIT:
+ sdlSampleType = SampleType.UNSIGNED_8_BIT;
+ break;
+ case _16_BIT:
+ sdlSampleType = SampleType.SIGNED_16_BIT;
+ break;
+ default:
+ finish(completionListener, false);
+ return;
+
+ }
+ } else {
+ finish(completionListener, false);
+ return;
+ }
+
+ streamingStateMachine.transitionToState(StreamingStateMachine.READY);
+ serviceCompletionListener = completionListener;
+ serviceCompletionHandler.postDelayed(serviceCompletionTimeoutCallback, COMPLETION_TIMEOUT);
+ internalInterface.startAudioService(encrypted);
+ }
+
+ /**
+ * Makes the callback to the listener
+ * @param listener the listener to notify
+ * @param isSuccess flag to notify
+ */
+ private void finish(CompletionListener listener, boolean isSuccess) {
+ if (listener != null) {
+ listener.onComplete(isSuccess);
+ }
+ }
+
+ /**
+ * Stops the audio service and audio stream to the connected device.
+ * The method is non-blocking.
+ */
+ public void stopAudioStream(final CompletionListener completionListener) {
+ if (!internalInterface.isConnected()) {
+ Log.w(TAG, "stopAudioStream called without being connected");
+ finish(completionListener, false);
+ return;
+ }
+
+ // streaming state must be STARTED (starting the service is ready. starting stream is started)
+ if (streamingStateMachine.getState() != StreamingStateMachine.STARTED) {
+ Log.w(TAG, "stopAudioStream called but streamingStateMachine is not STARTED (current: " + streamingStateMachine.getState() + ")");
+ finish(completionListener, false);
+ return;
+ }
+
+ streamingStateMachine.transitionToState(StreamingStateMachine.STOPPED);
+ serviceCompletionListener = completionListener;
+ serviceCompletionHandler.postDelayed(serviceCompletionTimeoutCallback, COMPLETION_TIMEOUT);
+ internalInterface.stopAudioService();
+ }
+
+ /**
+ * Pushes the specified resource file to the playback queue.
+ * The audio file will be played immediately. If another audio file is currently playing
+ * the specified file will stay queued and automatically played when ready.
+ * @param resourceId The specified resource file to be played.
+ * @param completionListener A completion listener that informs when the audio file is played.
+ */
+ public void pushResource(int resourceId, final CompletionListener completionListener) {
+ Context c = context.get();
+ Resources r = c.getResources();
+ Uri uri = new Uri.Builder()
+ .scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
+ .authority(r.getResourcePackageName(resourceId))
+ .appendPath(r.getResourceTypeName(resourceId))
+ .appendPath(r.getResourceEntryName(resourceId))
+ .build();
+
+ this.pushAudioSource(uri, completionListener);
+ }
+
+ /**
+ * Pushes the specified audio file to the playback queue.
+ * The audio file will be played immediately. If another audio file is currently playing
+ * the specified file will stay queued and automatically played when ready.
+ * @param audioSource The specified audio file to be played.
+ * @param completionListener A completion listener that informs when the audio file is played.
+ */
+ @SuppressWarnings("WeakerAccess")
+ public void pushAudioSource(Uri audioSource, final CompletionListener completionListener) {
+ // streaming state must be STARTED (starting the service is ready. starting stream is started)
+ if (streamingStateMachine.getState() != StreamingStateMachine.STARTED) {
+ return;
+ }
+
+ BaseAudioDecoder decoder;
+ AudioDecoderListener decoderListener = new AudioDecoderListener() {
+ @Override
+ public void onAudioDataAvailable(SampleBuffer buffer) {
+ sdlAudioStream.sendAudio(buffer.getByteBuffer(), buffer.getPresentationTimeUs());
+ }
+
+ @Override
+ public void onDecoderFinish(boolean success) {
+ finish(completionListener, true);
+
+ synchronized (queue) {
+ // remove throws an exception if the queue is empty. The decoder of this listener
+ // should still be in this queue so we should be fine by just removing it
+ // if the queue is empty then we have a bug somewhere in the code
+ // and we deserve the crash...
+ queue.remove();
+
+ // if the queue contains more items then start the first one (without removing it)
+ if (queue.size() > 0) {
+ queue.element().start();
+ }
+ }
+ }
+
+ @Override
+ public void onDecoderError(Exception e) {
+ Log.e(TAG, "decoder error", e);
+ }
+ };
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ decoder = new AudioDecoder(audioSource, context.get(), sdlSampleRate, sdlSampleType, decoderListener);
+ } else {
+ // this BaseAudioDecoder subclass uses methods deprecated with api 21
+ decoder = new AudioDecoderCompat(audioSource, context.get(), sdlSampleRate, sdlSampleType, decoderListener);
+ }
+
+ synchronized (queue) {
+ queue.add(decoder);
+
+ if (queue.size() == 1) {
+ decoder.start();
+ }
+ }
+ }
+
+ @IntDef({SampleType.UNSIGNED_8_BIT, SampleType.SIGNED_16_BIT, SampleType.FLOAT})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface SampleType {
+ // ref https://developer.android.com/reference/android/media/AudioFormat "Encoding" section
+ // The audio sample is a 8 bit unsigned integer in the range [0, 255], with a 128 offset for zero.
+ // This is typically stored as a Java byte in a byte array or ByteBuffer. Since the Java byte is
+ // signed, be careful with math operations and conversions as the most significant bit is inverted.
+ //
+ // The unsigned byte range is [0, 255] and should be converted to double [-1.0, 1.0]
+ // The 8 bits of the byte are easily converted to int by using bitwise operator
+ int UNSIGNED_8_BIT = Byte.SIZE >> 3;
+
+ // ref https://developer.android.com/reference/android/media/AudioFormat "Encoding" section
+ // The audio sample is a 16 bit signed integer typically stored as a Java short in a short array,
+ // but when the short is stored in a ByteBuffer, it is native endian (as compared to the default Java big endian).
+ // The short has full range from [-32768, 32767], and is sometimes interpreted as fixed point Q.15 data.
+ //
+ // the conversion is slightly easier from [-32768, 32767] to [-1.0, 1.0]
+ int SIGNED_16_BIT = Short.SIZE >> 3;
+
+ // ref https://developer.android.com/reference/android/media/AudioFormat "Encoding" section
+ // Introduced in API Build.VERSION_CODES.LOLLIPOP, this encoding specifies that the audio sample
+ // is a 32 bit IEEE single precision float. The sample can be manipulated as a Java float in a
+ // float array, though within a ByteBuffer it is stored in native endian byte order. The nominal
+ // range of ENCODING_PCM_FLOAT audio data is [-1.0, 1.0].
+ int FLOAT = Float.SIZE >> 3;
+ }
+}
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/BaseAudioDecoder.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/BaseAudioDecoder.java
new file mode 100644
index 000000000..ca74e7d85
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/BaseAudioDecoder.java
@@ -0,0 +1,240 @@
+package com.smartdevicelink.api.audio;
+
+import android.content.Context;
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.net.Uri;
+import android.os.Build;
+import android.support.annotation.NonNull;
+import android.support.annotation.RequiresApi;
+import android.util.Log;
+import com.smartdevicelink.api.audio.AudioStreamManager.SampleType;
+import com.smartdevicelink.proxy.rpc.AudioPassThruCapabilities;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+@SuppressWarnings("WeakerAccess")
+@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
+public abstract class BaseAudioDecoder {
+ private static final String TAG = AudioDecoder.class.getSimpleName();
+
+ protected final int targetSampleRate;
+ protected @SampleType final int targetSampleType;
+
+ private int outputChannelCount;
+ private int outputSampleRate;
+ private @SampleType int outputSampleType;
+
+ private double lastOutputSample = 0;
+
+ private double lastOutputPresentationTimeUs = 0;
+ private double lastTargetPresentationTimeUs = 0;
+
+ protected MediaExtractor extractor;
+ protected MediaCodec decoder;
+
+ protected Uri audioSource;
+ protected Context context;
+ protected final AudioDecoderListener listener;
+
+ /**
+ *
+ * @param audioSource Uri of the audio source to be converted
+ * @param context the context
+ * @param sampleRate can be either 8000, 16000, 22050 or 44100
+ * @see AudioPassThruCapabilities#getSamplingRate()
+ * @param sampleType can be either UNSIGNED_8_BIT, SIGNED_16_BIT, FLOAT
+ * @see SampleType
+ * @param listener listener for event callbacks
+ */
+ public BaseAudioDecoder(@NonNull Uri audioSource, @NonNull Context context, int sampleRate, @SampleType int sampleType, AudioDecoderListener listener) {
+ this.audioSource = audioSource;
+ this.context = context;
+ this.listener = listener;
+
+ targetSampleRate = sampleRate;
+ targetSampleType = sampleType;
+ }
+
+ protected void initMediaComponents() throws Exception {
+ extractor = new MediaExtractor();
+ WeakReference<Context> weakRef = new WeakReference<>(context);
+ Context contextRef = weakRef.get();
+ extractor.setDataSource(contextRef, audioSource, null);
+ MediaFormat format = null;
+ String mime = null;
+
+ // Select the first audio track we find.
+ int numTracks = extractor.getTrackCount();
+ for (int i = 0; i < numTracks; ++i) {
+ MediaFormat f = extractor.getTrackFormat(i);
+ String m = f.getString(MediaFormat.KEY_MIME);
+ if (m.startsWith("audio/")) {
+ format = f;
+ mime = m;
+ extractor.selectTrack(i);
+ break;
+ }
+ }
+
+ if (mime == null) {
+ throw new Exception("The audio file " + audioSource.getPath() + " doesn't contain an audio track.");
+ }
+
+ decoder = MediaCodec.createDecoderByType(mime);
+ decoder.configure(format, null, null, 0);
+ }
+
+ private Double sampleAtTargetTime(double lastOutputSample, SampleBuffer outputSampleBuffer, double outputPresentationTimeUs, double outputDurationPerSampleUs, double targetPresentationTimeUs) {
+ double timeDiff = targetPresentationTimeUs - outputPresentationTimeUs;
+ double index = timeDiff / outputDurationPerSampleUs;
+
+ // the "last known sample" allows an index from -1.0 to 0
+ // the index cannot exceed the last sample. it must be stored to become the "last known sample" in the next iteration
+ if (index < -1.0 || Math.ceil(index) >= outputSampleBuffer.limit()) {
+ return null;
+ }
+
+ if (index == -1.0) {
+ // the index points exactly to the last known sample
+ return lastOutputSample;
+ } else if (index % 1 == 0) {
+ // index has no digits. therefore current index points to a known sample
+ return outputSampleBuffer.get((int) index);
+ } else {
+ // the first sample can be the last known one
+ double first = index < 0.0 ? lastOutputSample : outputSampleBuffer.get((int) index);
+ double second = outputSampleBuffer.get((int) Math.ceil(index));
+ double rel = index % 1;
+
+ // if the relative is between -1 and 0
+ if (rel < 0.0) {
+ rel = 1 + rel;
+ }
+
+ return first + (second - first) * rel;
+ }
+ }
+
+ protected MediaCodec.BufferInfo onInputBufferAvailable(@NonNull MediaExtractor extractor, @NonNull ByteBuffer inputBuffer) {
+ long sampleTime = extractor.getSampleTime();
+ int counter = 0;
+ int maxresult = 0;
+ int result;
+ boolean advanced = false;
+
+ do {
+ result = extractor.readSampleData(inputBuffer, counter);
+ if (result >= 0) {
+ advanced = extractor.advance();
+ maxresult = Math.max(maxresult, result);
+ counter += result;
+ }
+ } while (result >= 0 && advanced && inputBuffer.capacity() - inputBuffer.limit() > maxresult);
+ // the remaining capacity should be more than enough for another sample data block
+
+ // queue the input buffer. At end of file counter will be 0 and flags marks end of stream
+ // offset MUST be 0. The output buffer code cannot handle offsets
+ // result < 0 means the end of the file input is reached
+ int flags = advanced ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ bufferInfo.set(0, counter, sampleTime, flags);
+
+ return bufferInfo;
+ }
+
+ protected SampleBuffer onOutputBufferAvailable(@NonNull ByteBuffer outputBuffer) {
+ double outputPresentationTimeUs = lastOutputPresentationTimeUs;
+ double outputDurationPerSampleUs = 1000000.0 / (double)outputSampleRate;
+
+ double targetPresentationTimeUs = lastTargetPresentationTimeUs;
+ double targetDurationPerSampleUs = 1000000.0 / (double)targetSampleRate;
+
+ // wrap the output buffer to make it provide audio samples
+ SampleBuffer outputSampleBuffer = SampleBuffer.wrap(outputBuffer, outputSampleType, outputChannelCount, (long)outputPresentationTimeUs);
+ outputSampleBuffer.position(0);
+
+ // the buffer size is related to the output and target sample rate
+ // add 2 samples to round up and add an extra sample
+ int sampleSize = outputSampleBuffer.limit() * targetSampleRate / outputSampleRate + 2;
+
+ SampleBuffer targetSampleBuffer = SampleBuffer.allocate(sampleSize, targetSampleType, ByteOrder.LITTLE_ENDIAN, (long)targetPresentationTimeUs);
+ Double sample;
+
+ do {
+ sample = sampleAtTargetTime(lastOutputSample, outputSampleBuffer, outputPresentationTimeUs, outputDurationPerSampleUs, targetPresentationTimeUs);
+ if (sample != null) {
+ targetSampleBuffer.put(sample);
+ targetPresentationTimeUs += targetDurationPerSampleUs;
+ }
+ } while (sample != null);
+
+ lastTargetPresentationTimeUs = targetPresentationTimeUs;
+ lastOutputPresentationTimeUs += outputSampleBuffer.limit() * outputDurationPerSampleUs;
+ lastOutputSample = outputSampleBuffer.get(outputSampleBuffer.limit() - 1);
+
+ targetSampleBuffer.limit(targetSampleBuffer.position());
+ targetSampleBuffer.position(0);
+
+ return targetSampleBuffer;
+ }
+
+ protected void onOutputFormatChanged(@NonNull MediaFormat mediaFormat) {
+ if (mediaFormat.containsKey(MediaFormat.KEY_CHANNEL_COUNT)) {
+ outputChannelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ }
+
+ if (mediaFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) {
+ outputSampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
+ }
+
+ if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
+ int key = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
+ switch (key) {
+ case AudioFormat.ENCODING_PCM_8BIT:
+ outputSampleType = SampleType.UNSIGNED_8_BIT;
+ break;
+ case AudioFormat.ENCODING_PCM_FLOAT:
+ outputSampleType = SampleType.FLOAT;
+ break;
+ case AudioFormat.ENCODING_PCM_16BIT:
+ default:
+ // by default we fallback to signed 16 bit samples
+ outputSampleType = SampleType.SIGNED_16_BIT;
+ break;
+ }
+ } else {
+ outputSampleType = SampleType.SIGNED_16_BIT;
+ }
+ }
+
+ protected void onMediaCodecError(@NonNull MediaCodec.CodecException e) {
+ Log.e(TAG, "MediaCodec.onError: " + e.getLocalizedMessage());
+ if (listener != null) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ listener.onDecoderError(e);
+ } else {
+ listener.onDecoderError(new Exception("Error decoding audio file"));
+ }
+ }
+ }
+
+ public abstract void start();
+
+ public void stop() {
+ if (decoder != null) {
+ decoder.stop();
+ decoder.release();
+ decoder = null;
+ }
+
+ if (extractor != null) {
+ extractor = null;
+ }
+ }
+}
diff --git a/sdl_android/src/main/java/com/smartdevicelink/api/audio/SampleBuffer.java b/sdl_android/src/main/java/com/smartdevicelink/api/audio/SampleBuffer.java
new file mode 100644
index 000000000..949e75e20
--- /dev/null
+++ b/sdl_android/src/main/java/com/smartdevicelink/api/audio/SampleBuffer.java
@@ -0,0 +1,280 @@
+package com.smartdevicelink.api.audio;
+
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import com.smartdevicelink.api.audio.AudioStreamManager.SampleType;
+
+/**
+ * Wraps a buffer of raw audio samples depending on the sample type (8 bit, 16 bit)
+ * Unifies samples into double.
+ */
+public class SampleBuffer {
+ private static final String TAG = SampleBuffer.class.getSimpleName();
+
+ @SuppressWarnings({"unused", "FieldCanBeLocal"})
+ private @SampleType final int sampleType;
+ private final ByteBuffer byteBuffer;
+ private final int channelCount;
+ private final long presentationTimeUs;
+
+ /**
+ * Wraps a raw (mono) byte buffer to a new sample buffer.
+ * @param buffer The raw buffer to be wrapped.
+ * @param sampleType The sample type of the samples in the raw buffer.
+ * @param presentationTimeUs The presentation time of the buffer.
+ * @return A new sample buffer wrapping the specified raw buffer.
+ */
+ public static SampleBuffer wrap(ByteBuffer buffer, @SampleType int sampleType, long presentationTimeUs) {
+ return new SampleBuffer(buffer, sampleType, 1, presentationTimeUs);
+ }
+
+ /**
+ * Wraps a raw byte buffer to a new sample buffer.
+ * @param buffer The raw buffer to be wrapped.
+ * @param sampleType The sample type of the samples in the raw buffer.
+ * @param channelCount The number of channels (1 = mono, 2 = stereo).
+ * @param presentationTimeUs The presentation time of the buffer.
+ * @return A new sample buffer wrapping the specified raw buffer.
+ */
+ public static SampleBuffer wrap(ByteBuffer buffer, @SampleType int sampleType, int channelCount, long presentationTimeUs) {
+ return new SampleBuffer(buffer, sampleType, channelCount, presentationTimeUs);
+ }
+
+ /**
+ * Allocates a new sample buffer.
+ * @param capacity The specified sample capacity of the sample buffer.
+ * @param sampleType The sample type of the samples the buffer should store.
+ * @param byteOrder The byte order for the samples (little or big endian).
+ * @param presentationTimeUs The presentation time for the buffer.
+ * @return A new and empty sample buffer.
+ */
+ public static SampleBuffer allocate(int capacity, @SampleType int sampleType, ByteOrder byteOrder, long presentationTimeUs) {
+ return new SampleBuffer(capacity, sampleType, 1, byteOrder, presentationTimeUs);
+ }
+
+ /**
+ * Allocates a new sample buffer.
+ * @param capacity The specified sample capacity of the sample buffer.
+ * @param sampleType The sample type of the samples the buffer should store.
+ * @param channelCount The number of channels (1 = mono, 2 = stereo).
+ * @param byteOrder The byte order for the samples (little or big endian).
+ * @param presentationTimeUs The presentation time for the buffer.
+ * @return A new and empty sample buffer.
+ */
+ @SuppressWarnings("unused")
+ public static SampleBuffer allocate(int capacity, @SampleType int sampleType, int channelCount, ByteOrder byteOrder, long presentationTimeUs) {
+ return new SampleBuffer(capacity, sampleType, channelCount, byteOrder, presentationTimeUs);
+ }
+
+ private SampleBuffer(int capacity, @SampleType int sampleType, int channelCount, ByteOrder byteOrder, long presentationTimeUs) {
+ this.byteBuffer = ByteBuffer.allocate(sampleType * capacity);
+ this.byteBuffer.order(byteOrder);
+ this.sampleType = sampleType;
+ this.channelCount = channelCount;
+ this.presentationTimeUs = presentationTimeUs;
+ }
+
+ private SampleBuffer(ByteBuffer buffer, @SampleType int sampleType, int channelCount, long presentationTimeUs) {
+ this.byteBuffer = buffer;
+ this.sampleType = sampleType;
+ this.channelCount = channelCount;
+ this.presentationTimeUs = presentationTimeUs;
+ }
+
+ /**
+ * Returns the capacity of the buffer per channel.
+ */
+ @SuppressWarnings("unused")
+ public int capacity() {
+ return byteBuffer.capacity() / sampleType / channelCount;
+ }
+
+ /**
+ * Returns the number of samples in the buffer per channel.
+ */
+ public int limit() {
+ return byteBuffer.limit() / sampleType / channelCount;
+ }
+
+ /**
+ * Sets the number of samples in the buffer to the new limit.
+ * @param newLimit The new limit of the sample buffer.
+ */
+ public void limit(int newLimit) {
+ byteBuffer.limit(newLimit * sampleType * channelCount);
+ }
+
+ /**
+ * Returns the current position in the buffer per channel.
+ * @return The position of the sample buffer.
+ */
+ public int position() {
+ return byteBuffer.position() / sampleType / channelCount;
+ }
+
+ /**
+ *Sets the position of the sample buffer to the new index.
+ * @param newPosition The new position of the sample buffer.
+ */
+ public void position(int newPosition) {
+ byteBuffer.position(newPosition * sampleType * channelCount);
+ }
+
+ /**
+ * Returns the sample of the current position and then increments the position.
+ * The sample returned is a mixed sample getting all samples from each channel.
+ * @return The mixed sample.
+ */
+ public double get() {
+ // convenient method to avoid duplicate code: we use -1 index to call get()
+ return get(-1);
+ }
+
+ /**
+ * Returns the sample from the given index in the buffer.
+ * If the buffer's channel count is > 1 the sample returned
+ * is a mixed sample getting all samples from each channel.
+ * @param index The index of the sample requested.
+ * @return The sample requested.
+ */
+ public double get(int index) {
+ int internalIndex = index * channelCount * sampleType;
+
+ switch (sampleType) {
+ case SampleType.UNSIGNED_8_BIT: {
+ double avg = 0;
+
+ // get a sample mix to mono from the index
+ for (int i = 0; i < channelCount; i++) {
+ byte b = index == -1 ? byteBuffer.get() : byteBuffer.get(internalIndex + i * sampleType);
+ int a = b & 0xff; // convert the 8 bits into int so we can calc > 127
+ avg += a / (double)channelCount;
+ }
+
+ return avg * 2.0 / 255.0 - 1.0; //magic? check out SampleType
+ }
+ case SampleType.SIGNED_16_BIT: {
+ double avg = 0;
+
+ // get a sample mix to mono from the index
+ for (int i = 0; i < channelCount; i++) {
+ short a = index == -1 ? byteBuffer.getShort() : byteBuffer.getShort(internalIndex + i * sampleType);
+ avg += a / (double)channelCount;
+ }
+
+ return (avg + 32768.0) * 2.0 / 65535.0 - 1.0; //magic? check out SampleType
+ }
+ case SampleType.FLOAT: {
+ double avg = 0;
+
+ // get a sample mix to mono from the index
+ for (int i = 0; i < channelCount; i++) {
+ double a = index == -1 ? byteBuffer.getFloat() : byteBuffer.getFloat(internalIndex + i * sampleType);
+ avg += a / (double)channelCount;
+ }
+
+ return avg;
+ }
+ default: {
+ Log.e(TAG, "SampleBuffer.get(int): The sample type is not known: " + sampleType);
+ return 0.0;
+ }
+ }
+ }
+
+ /**
+ * Puts a sample to the current position and increments the position.
+ * @param sample The sample to put into the buffer.
+ */
+ public void put(double sample) {
+ put(-1, sample);
+ }
+
+ /**
+ * Puts a sample to the given index in the buffer.
+ * If the buffer's channel count is > 1 the sample
+ * will be stored in each channel at the given index.
+ * @param index The index to put the sample.
+ * @param sample The sample to store in the buffer.
+ */
+ public void put(int index, double sample) {
+ int internalIndex = index * channelCount * sampleType;
+ switch (sampleType) {
+ case SampleType.UNSIGNED_8_BIT: {
+ int a = (int)Math.round((sample + 1.0) * 255.0 / 2.0); //magic? check out SampleType
+ byte b = (byte)a;
+ if (index == -1) {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.put(b);
+ }
+ } else {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.put(internalIndex + i * sampleType, b);
+ }
+ }
+ break;
+ }
+ case SampleType.SIGNED_16_BIT: {
+ short a = (short)Math.round((sample + 1.0) * 65535 / 2.0 - 32767.0); //magic? check out SampleType
+ if (index == -1) {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.putShort(a);
+ }
+ } else {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.putShort(internalIndex + i * sampleType, a);
+ }
+ }
+ break;
+ }
+ case SampleType.FLOAT: {
+ if (index == -1) {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.putFloat((float) sample);
+ }
+ } else {
+ for (int i = 0; i < channelCount; i++) {
+ byteBuffer.putFloat(internalIndex + i * sampleType, (float) sample);
+ }
+ }
+ break;
+ }
+ default: {
+ Log.e(TAG, "SampleBuffer.set(int): The sample type is not known: " + sampleType);
+ }
+ }
+ }
+
+ /**
+ * Returns the raw byte buffer managed by this sample buffer.
+ * @return The raw byte buffer managed by this sample buffer.
+ */
+ public ByteBuffer getByteBuffer() {
+ return byteBuffer;
+ }
+
+ /**
+ * Returns a copy of the bytes from position 0 to the current limit.
+ * @return A copy of the bytes.
+ */
+ public byte[] getBytes() {
+ int limit = byteBuffer.limit();
+ byte[] bytes = new byte[limit];
+
+ for (int i = 0; i < limit; ++i) {
+ bytes[i] = byteBuffer.get(i);
+ }
+
+ return bytes;
+ }
+
+ /**
+ * The presentation time of this sample buffer.
+ * @return The presentation time of this sample buffer.
+ */
+ public long getPresentationTimeUs() {
+ return presentationTimeUs;
+ }
+}