author    NicoleYarroch <nicole@livio.io>    2017-10-10 11:07:30 -0400
committer NicoleYarroch <nicole@livio.io>    2017-10-10 11:07:30 -0400
commit    c938af96a6920d7e6e3b0330f8614489287dfa18 (patch)
tree      f4c215bf51d23dc6e963e91a1c989bbfb3fe4e8f
parent    912aab4510ac08c2ebdacb10c643fce785980804 (diff)
download  sdl_ios-c938af96a6920d7e6e3b0330f8614489287dfa18.tar.gz
Removed HMI level NONE checks
- Reverted to Joel’s lifecycle state changes
- Fixed the video start service so it is only sent after the video capabilities response is returned

Signed-off-by: NicoleYarroch <nicole@livio.io>
-rw-r--r--  SmartDeviceLink/SDLLifecycleManager.m                           21
-rw-r--r--  SmartDeviceLink/SDLStreamingMediaLifecycleManager.m            102
-rw-r--r--  SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m   820
3 files changed, 462 insertions(+), 481 deletions(-)
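
Before the diff itself, a condensed sketch of the new sequencing this commit introduces in SDLStreamingMediaLifecycleManager.m. It is assembled from the hunks below only: the data source and haptic-interface handling are omitted, the weak/strong handling is simplified, and the enclosing method name is inferred from the surrounding context rather than shown in the hunk header. The point it illustrates is that the video start service request is now sent only from inside the video capability response handler.

    // Sketch only: condensed from the video-stream-starting hunk below.
    - (void)didEnterStateVideoStreamStarting {
        self.restartVideoStream = NO;

        __weak typeof(self) weakSelf = self;
        [self sdl_requestVideoCapabilities:^(SDLVideoStreamingCapability *_Nullable capability) {
            if (capability != nil) {
                // The head unit answered: use its preferred formats and resolution.
                weakSelf.preferredFormats = capability.supportedFormats;
                weakSelf.preferredResolutions = @[capability.preferredResolution];
            } else {
                // No capability response: fall back to H.264 RAW at the RAI screen size.
                SDLVideoStreamingFormat *format = [[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRAW];
                SDLImageResolution *resolution = [[SDLImageResolution alloc] initWithWidth:weakSelf.screenSize.width height:weakSelf.screenSize.height];
                weakSelf.preferredFormats = @[format];
                weakSelf.preferredResolutions = @[resolution];
            }

            // The start service control frame goes out only after the capability
            // request has been answered (or has failed), never before.
            [weakSelf sdl_sendVideoStartService];
        }];
    }
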
diff --git a/SmartDeviceLink/SDLLifecycleManager.m b/SmartDeviceLink/SDLLifecycleManager.m
index 30b3576b3..6025e72ab 100644
--- a/SmartDeviceLink/SDLLifecycleManager.m
+++ b/SmartDeviceLink/SDLLifecycleManager.m
@@ -71,7 +71,6 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
// Private properties
@property (copy, nonatomic) SDLManagerReadyBlock readyHandler;
-@property (assign, nonatomic) BOOL firstHMINonNoneOccurred;
@end
@@ -105,7 +104,6 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
_notificationDispatcher = [[SDLNotificationDispatcher alloc] init];
_responseDispatcher = [[SDLResponseDispatcher alloc] initWithNotificationDispatcher:_notificationDispatcher];
_registerResponse = nil;
- _firstHMINonNoneOccurred = NO;
// Managers
_fileManager = [[SDLFileManager alloc] initWithConnectionManager:self];
@@ -184,6 +182,10 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
self.proxy = [SDLProxyFactory buildSDLProxyWithListener:self.notificationDispatcher];
}
#pragma clang diagnostic pop
+
+ if (self.streamManager != nil) {
+ [self.streamManager startWithProtocol:self.proxy.protocol];
+ }
}
- (void)didEnterStateStopped {
@@ -441,15 +443,6 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
return YES;
}
-- (void)sdl_onFirstHMINonNone {
- // If we are a nav / projection app and desire to stream, we need to be in HMI background, limited, or full and perform additional setup when that occurs
- if (self.streamManager == nil) {
- return;
- }
-
- [self.streamManager startWithProtocol:self.proxy.protocol];
-}
-
#pragma mark SDL notification observers
@@ -460,7 +453,6 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
- (void)transportDidDisconnect {
SDLLogD(@"Transport Disconnected");
- self.firstHMINonNoneOccurred = NO;
if (self.lifecycleState == SDLLifecycleStateUnregistering || self.lifecycleState == SDLLifecycleStateStopped) {
[self.lifecycleStateMachine transitionToState:SDLLifecycleStateStopped];
@@ -486,11 +478,6 @@ SDLLifecycleState *const SDLLifecycleStateReady = @"Ready";
SDLLogD(@"HMI level changed from %@ to %@", oldHMILevel, self.hmiLevel);
- if (!self.firstHMINonNoneOccurred && ![self.hmiLevel isEqualToEnum:SDLHMILevelNone]) {
- self.firstHMINonNoneOccurred = YES;
- [self sdl_onFirstHMINonNone];
- }
-
if ([self.lifecycleStateMachine isCurrentState:SDLLifecycleStateSettingUpHMI]) {
[self.lifecycleStateMachine transitionToState:SDLLifecycleStateReady];
}
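
For reference, a minimal sketch of the net effect of the SDLLifecycleManager.m hunks above (the enclosing method is not named in the hunk headers; it is assumed here to be didEnterStateStarted based on the surrounding context, and the deprecation pragma is omitted): the streaming manager is now started as soon as the proxy and its protocol exist, and the firstHMINonNoneOccurred / sdl_onFirstHMINonNone bookkeeping is removed.

    // Sketch only; the method name is an assumption, not taken from the diff.
    - (void)didEnterStateStarted {
        self.proxy = [SDLProxyFactory buildSDLProxyWithListener:self.notificationDispatcher];

        // Previously deferred until the first non-NONE HMI level; now the
        // streaming manager is handed the protocol as soon as it is available.
        if (self.streamManager != nil) {
            [self.streamManager startWithProtocol:self.proxy.protocol];
        }
    }
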
diff --git a/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m b/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
index 0834a9322..cae582d7c 100644
--- a/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
+++ b/SmartDeviceLink/SDLStreamingMediaLifecycleManager.m
@@ -87,8 +87,6 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
@property (assign, nonatomic) CMTime lastPresentationTimestamp;
-@property (assign, nonatomic) BOOL receivedVideoCapabilityResponse;
-
@end
@@ -142,7 +140,6 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sdl_appStateDidUpdate:) name:UIApplicationWillResignActiveNotification object:nil];
_lastPresentationTimestamp = kCMTimeInvalid;
- _receivedVideoCapabilityResponse = NO;
return self;
}
@@ -153,46 +150,6 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
if (![self.protocol.protocolDelegateTable containsObject:self]) {
[self.protocol.protocolDelegateTable addObject:self];
}
-
- __weak typeof(self) weakSelf = self;
- [self sdl_requestVideoCapabilities:^(SDLVideoStreamingCapability * _Nullable capability) {
- SDLLogD(@"Received video capability response");
- SDLLogV(@"Capability: %@", capability);
-
- self.receivedVideoCapabilityResponse = YES;
-
- if (capability != nil) {
- // If we got a response, get our preferred formats and resolutions
- weakSelf.preferredFormats = capability.supportedFormats;
- weakSelf.preferredResolutions = @[capability.preferredResolution];
-
- if (weakSelf.dataSource != nil) {
- SDLLogV(@"Calling data source for modified preferred formats and resolutions");
- weakSelf.preferredFormats = [weakSelf.dataSource preferredVideoFormatOrderFromHeadUnitPreferredOrder:weakSelf.preferredFormats];
- weakSelf.preferredResolutions = [weakSelf.dataSource resolutionFromHeadUnitPreferredResolution:weakSelf.preferredResolutions.firstObject];
- }
-
- if (weakSelf.hapticInterface != nil) {
- weakSelf.hapticInterface.enableHapticDataRequests = capability.hapticSpatialDataSupported.boolValue;
- }
-
- SDLLogD(@"Got specialized video capabilites, preferred formats: %@, resolutions: %@ haptics enabled %@", weakSelf.preferredFormats, weakSelf.preferredResolutions, (capability.hapticSpatialDataSupported.boolValue ? @"YES" : @"NO"));
- } else {
- // If we can't get capabilities, we're assuming it's H264 RAW at whatever the display capabilities said in the RAIR. We also aren't going to call the data source because they have no options.
- SDLVideoStreamingFormat *format = [[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRAW];
- SDLImageResolution *resolution = [[SDLImageResolution alloc] initWithWidth:weakSelf.screenSize.width height:weakSelf.screenSize.height];
- weakSelf.preferredFormats = @[format];
- weakSelf.preferredResolutions = @[resolution];
-
- if (weakSelf.hapticInterface != nil) {
- weakSelf.hapticInterface.enableHapticDataRequests = NO;
- }
-
- SDLLogD(@"Using generic video capabilites, preferred formats: %@, resolutions: %@, haptics disabled", weakSelf.preferredFormats, weakSelf.preferredResolutions);
- }
-
- [weakSelf sdl_startVideoSession];
- }];
}
- (void)stop {
@@ -333,8 +290,6 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
_videoEncrypted = NO;
_videoFormat = nil;
- _receivedVideoCapabilityResponse = NO;
-
if (_videoEncoder != nil) {
[_videoEncoder stop];
_videoEncoder = nil;
@@ -352,7 +307,43 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
SDLLogD(@"Video stream starting");
self.restartVideoStream = NO;
- [self sdl_sendVideoStartService];
+ __weak typeof(self) weakSelf = self;
+ [self sdl_requestVideoCapabilities:^(SDLVideoStreamingCapability * _Nullable capability) {
+ SDLLogD(@"Received video capability response");
+ SDLLogV(@"Capability: %@", capability);
+
+ if (capability != nil) {
+ // If we got a response, get our preferred formats and resolutions
+ weakSelf.preferredFormats = capability.supportedFormats;
+ weakSelf.preferredResolutions = @[capability.preferredResolution];
+
+ if (weakSelf.dataSource != nil) {
+ SDLLogV(@"Calling data source for modified preferred formats and resolutions");
+ weakSelf.preferredFormats = [weakSelf.dataSource preferredVideoFormatOrderFromHeadUnitPreferredOrder:weakSelf.preferredFormats];
+ weakSelf.preferredResolutions = [weakSelf.dataSource resolutionFromHeadUnitPreferredResolution:weakSelf.preferredResolutions.firstObject];
+ }
+
+ if (weakSelf.hapticInterface != nil) {
+ weakSelf.hapticInterface.enableHapticDataRequests = capability.hapticSpatialDataSupported.boolValue;
+ }
+
+ SDLLogD(@"Got specialized video capabilites, preferred formats: %@, resolutions: %@ haptics enabled %@", weakSelf.preferredFormats, weakSelf.preferredResolutions, (capability.hapticSpatialDataSupported.boolValue ? @"YES" : @"NO"));
+ } else {
+ // If we can't get capabilities, we're assuming it's H264 RAW at whatever the display capabilities said in the RAIR. We also aren't going to call the data source because they have no options.
+ SDLVideoStreamingFormat *format = [[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRAW];
+ SDLImageResolution *resolution = [[SDLImageResolution alloc] initWithWidth:weakSelf.screenSize.width height:weakSelf.screenSize.height];
+ weakSelf.preferredFormats = @[format];
+ weakSelf.preferredResolutions = @[resolution];
+
+ if (weakSelf.hapticInterface != nil) {
+ weakSelf.hapticInterface.enableHapticDataRequests = NO;
+ }
+
+ SDLLogD(@"Using generic video capabilites, preferred formats: %@, resolutions: %@, haptics disabled", weakSelf.preferredFormats, weakSelf.preferredResolutions);
+ }
+
+ [self sdl_sendVideoStartService];
+ }];
}
- (void)didEnterStateVideoStreamReady {
@@ -617,29 +608,26 @@ typedef void(^SDLVideoCapabilityResponseHandler)(SDLVideoStreamingCapability *_N
- (void)sdl_startVideoSession {
SDLLogV(@"Attempting to start video session");
- if (!self.receivedVideoCapabilityResponse) {
- // The video stream manager should send a video start service after SDL Core returns a video capability response.
- return;
- }
-
if (!self.isStreamingSupported) {
+ SDLLogV(@"Streaming is not supported. Video start service request will not be sent.");
return;
}
if (self.shouldRestartVideoStream && [self.videoStreamStateMachine isCurrentState:SDLVideoStreamStateReady]) {
+ SDLLogV(@"Video needs to be restarted. Stopping video stream.");
[self sdl_stopVideoSession];
return;
}
if ([self.videoStreamStateMachine isCurrentState:SDLVideoStreamStateStopped]
- && self.isHmiStateVideoStreamCapable
- && self.isAppStateVideoStreamCapable) {
+ && self.isHmiStateVideoStreamCapable) {
+ SDLLogV(@"Sending a video start service request.");
[self.videoStreamStateMachine transitionToState:SDLVideoStreamStateStarting];
} else {
- SDLLogE(@"Unable to start video stream\n"
- "State: %@\n"
- "HMI state: %@\n"
- "App state: %@", self.videoStreamStateMachine.currentState, self.hmiLevel, self.appStateMachine.currentState);
+ SDLLogE(@"Unable to send video start service request\n"
+ "Video State must be SDLVideoStreamStateStopped: %@\n"
+ "HMI state must be LIMITED or FULL: %@\n",
+ self.videoStreamStateMachine.currentState, self.hmiLevel);
}
}
diff --git a/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m b/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
index 93ec5eca7..510c36bc1 100644
--- a/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
+++ b/SmartDeviceLinkTests/SDLStreamingMediaLifecycleManagerSpec.m
@@ -50,27 +50,27 @@ describe(@"the streaming media manager", ^{
__block SDLFakeStreamingManagerDataSource *testDataSource = [[SDLFakeStreamingManagerDataSource alloc] init];
__block NSString *someBackgroundTitleString = nil;
__block TestConnectionManager *testConnectionManager = nil;
-
+
__block void (^sendNotificationForHMILevel)(SDLHMILevel hmiLevel) = ^(SDLHMILevel hmiLevel) {
SDLOnHMIStatus *hmiStatus = [[SDLOnHMIStatus alloc] init];
hmiStatus.hmiLevel = hmiLevel;
SDLRPCNotificationNotification *notification = [[SDLRPCNotificationNotification alloc] initWithName:SDLDidChangeHMIStatusNotification object:self rpcNotification:hmiStatus];
[[NSNotificationCenter defaultCenter] postNotification:notification];
-
+
[NSThread sleepForTimeInterval:0.3];
};
-
+
beforeEach(^{
testConfiguration.customVideoEncoderSettings = @{
- (__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate : @1
- };
+ (__bridge NSString *)kVTCompressionPropertyKey_ExpectedFrameRate : @1
+ };
testConfiguration.dataSource = testDataSource;
testConfiguration.window = testWindow;
someBackgroundTitleString = @"Open Test App";
testConnectionManager = [[TestConnectionManager alloc] init];
streamingLifecycleManager = [[SDLStreamingMediaLifecycleManager alloc] initWithConnectionManager:testConnectionManager configuration:testConfiguration];
});
-
+
it(@"should initialize properties", ^{
expect(streamingLifecycleManager.touchManager).toNot(beNil());
expect(streamingLifecycleManager.hapticInterface).toNot(beNil());
@@ -94,20 +94,20 @@ describe(@"the streaming media manager", ^{
expect(streamingLifecycleManager.preferredFormatIndex).to(equal(0));
expect(streamingLifecycleManager.preferredResolutionIndex).to(equal(0));
});
-
+
describe(@"when started", ^{
__block BOOL readyHandlerSuccess = NO;
__block NSError *readyHandlerError = nil;
-
+
__block id protocolMock = OCMClassMock([SDLAbstractProtocol class]);
-
+
beforeEach(^{
readyHandlerSuccess = NO;
readyHandlerError = nil;
-
+
[streamingLifecycleManager startWithProtocol:protocolMock];
});
-
+
it(@"should be ready to stream", ^{
expect(@(streamingLifecycleManager.isStreamingSupported)).to(equal(@NO));
expect(@(streamingLifecycleManager.isVideoConnected)).to(equal(@NO));
@@ -122,63 +122,56 @@ describe(@"the streaming media manager", ^{
expect(streamingLifecycleManager.currentVideoStreamState).to(match(SDLVideoStreamStateStopped));
});
- it(@"should send out a video capabilities request", ^{
- expect(testConnectionManager.receivedRequests.lastObject).to(beAnInstanceOf([SDLGetSystemCapability class]));
-
- SDLGetSystemCapability *getCapability = (SDLGetSystemCapability *)testConnectionManager.receivedRequests.lastObject;
- expect(getCapability.systemCapabilityType).to(equal(SDLSystemCapabilityTypeVideoStreaming));
- });
-
describe(@"after receiving a register app interface notification", ^{
__block SDLRegisterAppInterfaceResponse *someRegisterAppInterfaceResponse = nil;
__block SDLDisplayCapabilities *someDisplayCapabilities = nil;
__block SDLScreenParams *someScreenParams = nil;
__block SDLImageResolution *someImageResolution = nil;
-
+
beforeEach(^{
someImageResolution = [[SDLImageResolution alloc] init];
someImageResolution.resolutionWidth = @(600);
someImageResolution.resolutionHeight = @(100);
-
+
someScreenParams = [[SDLScreenParams alloc] init];
someScreenParams.resolution = someImageResolution;
});
-
+
context(@"that does not support graphics", ^{
beforeEach(^{
someDisplayCapabilities = [[SDLDisplayCapabilities alloc] init];
someDisplayCapabilities.graphicSupported = @NO;
-
+
someDisplayCapabilities.screenParams = someScreenParams;
-
+
someRegisterAppInterfaceResponse = [[SDLRegisterAppInterfaceResponse alloc] init];
someRegisterAppInterfaceResponse.displayCapabilities = someDisplayCapabilities;
SDLRPCResponseNotification *notification = [[SDLRPCResponseNotification alloc] initWithName:SDLDidReceiveRegisterAppInterfaceResponse object:self rpcResponse:someRegisterAppInterfaceResponse];
-
+
[[NSNotificationCenter defaultCenter] postNotification:notification];
[NSThread sleepForTimeInterval:0.1];
});
-
+
it(@"should not support streaming", ^{
expect(@(streamingLifecycleManager.isStreamingSupported)).to(equal(@NO));
});
});
-
+
context(@"that supports graphics", ^{
beforeEach(^{
someDisplayCapabilities = [[SDLDisplayCapabilities alloc] init];
someDisplayCapabilities.graphicSupported = @YES;
-
+
someDisplayCapabilities.screenParams = someScreenParams;
-
+
someRegisterAppInterfaceResponse = [[SDLRegisterAppInterfaceResponse alloc] init];
someRegisterAppInterfaceResponse.displayCapabilities = someDisplayCapabilities;
SDLRPCResponseNotification *notification = [[SDLRPCResponseNotification alloc] initWithName:SDLDidReceiveRegisterAppInterfaceResponse object:self rpcResponse:someRegisterAppInterfaceResponse];
-
+
[[NSNotificationCenter defaultCenter] postNotification:notification];
[NSThread sleepForTimeInterval:0.1];
});
-
+
it(@"should support streaming", ^{
expect(@(streamingLifecycleManager.isStreamingSupported)).to(equal(@YES));
expect(@(CGSizeEqualToSize(streamingLifecycleManager.screenSize, CGSizeMake(600, 100)))).to(equal(@YES));
@@ -186,491 +179,504 @@ describe(@"the streaming media manager", ^{
});
});
- describe(@"after sending GetSystemCapabilities", ^{
- context(@"and receiving an error response", ^{
- // This happens if the HU doesn't understand GetSystemCapabilities
- beforeEach(^{
- SDLGenericResponse *genericResponse = [[SDLGenericResponse alloc] init];
- genericResponse.resultCode = SDLResultInvalidData;
+ describe(@"if the app state is active", ^{
+ __block id streamStub = nil;
- [testConnectionManager respondToLastRequestWithResponse:genericResponse];
- });
+ beforeEach(^{
+ streamStub = OCMPartialMock(streamingLifecycleManager);
- it(@"should have correct format and resolution", ^{
- expect(streamingLifecycleManager.preferredFormats).to(haveCount(1));
- expect(streamingLifecycleManager.preferredFormats.firstObject.codec).to(equal(SDLVideoStreamingCodecH264));
- expect(streamingLifecycleManager.preferredFormats.firstObject.protocol).to(equal(SDLVideoStreamingProtocolRAW));
+ OCMStub([streamStub isStreamingSupported]).andReturn(YES);
- expect(streamingLifecycleManager.preferredResolutions).to(haveCount(1));
- expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionWidth).to(equal(0));
- expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionHeight).to(equal(0));
- });
+ [streamingLifecycleManager.appStateMachine setToState:SDLAppStateActive fromOldState:nil callEnterTransition:NO];
});
- context(@"and receiving a response", ^{
- __block SDLImageResolution *resolution = nil;
- __block int32_t maxBitrate = 0;
- __block NSArray<SDLVideoStreamingFormat *> *testFormats = nil;
- __block BOOL testHapticsSupported = NO;
-
+ describe(@"and both streams are open", ^{
beforeEach(^{
- SDLGetSystemCapabilityResponse *response = [[SDLGetSystemCapabilityResponse alloc] init];
- response.success = @YES;
- response.systemCapability = [[SDLSystemCapability alloc] init];
- response.systemCapability.systemCapabilityType = SDLSystemCapabilityTypeVideoStreaming;
-
- resolution = [[SDLImageResolution alloc] initWithWidth:42 height:69];
- maxBitrate = 12345;
- testFormats = @[[[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH265 protocol:SDLVideoStreamingProtocolRTMP], [[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRTP]];
- testHapticsSupported = YES;
- response.systemCapability.videoStreamingCapability = [[SDLVideoStreamingCapability alloc] initWithPreferredResolution:resolution maxBitrate:maxBitrate supportedFormats:testFormats hapticDataSupported:testHapticsSupported];
- [testConnectionManager respondToLastRequestWithResponse:response];
- });
-
- it(@"should have set correct data", ^{
- // Correct formats should be retrieved from the data source
- expect(streamingLifecycleManager.preferredResolutions).to(haveCount(1));
- expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionWidth).to(equal(resolution.resolutionWidth));
- expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionHeight).to(equal(resolution.resolutionHeight));
-
- expect(streamingLifecycleManager.preferredFormats).to(haveCount(streamingLifecycleManager.supportedFormats.count + 1));
- expect(streamingLifecycleManager.preferredFormats.firstObject.codec).to(equal(testDataSource.extraFormat.codec));
- expect(streamingLifecycleManager.preferredFormats.firstObject.protocol).to(equal(testDataSource.extraFormat.protocol));
-
- // The haptic manager should be enabled
- expect(streamingLifecycleManager.hapticInterface.enableHapticDataRequests).to(equal(YES));
+ [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateReady fromOldState:nil callEnterTransition:NO];
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateReady fromOldState:nil callEnterTransition:NO];
});
- describe(@"if the app state is active", ^{
- __block id streamStub = nil;
-
+ describe(@"and the hmi state is limited", ^{
beforeEach(^{
- streamStub = OCMPartialMock(streamingLifecycleManager);
-
- OCMStub([streamStub isStreamingSupported]).andReturn(YES);
-
- [streamingLifecycleManager.appStateMachine setToState:SDLAppStateActive fromOldState:nil callEnterTransition:NO];
+ streamingLifecycleManager.hmiLevel = SDLHMILevelLimited;
});
- describe(@"and both streams are open", ^{
- beforeEach(^{
- [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateReady fromOldState:nil callEnterTransition:NO];
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateReady fromOldState:nil callEnterTransition:NO];
+ describe(@"and the hmi state changes to", ^{
+ context(@"none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
});
- describe(@"and the hmi state is limited", ^{
+ context(@"background", ^{
beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelLimited;
+ sendNotificationForHMILevel(SDLHMILevelBackground);
});
- describe(@"and the hmi state changes to", ^{
- context(@"none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
- });
-
- context(@"background", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
- });
-
- context(@"limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
- });
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
-
- context(@"full", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
- });
-
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
+ });
+
+ context(@"limited", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelLimited);
});
- describe(@"and the app state changes to", ^{
- context(@"inactive", ^{
- beforeEach(^{
- [streamingLifecycleManager.appStateMachine setToState:SDLAppStateInactive fromOldState:nil callEnterTransition:YES];
- });
-
- it(@"should flag to restart the video stream", ^{
- expect(@(streamingLifecycleManager.shouldRestartVideoStream)).to(equal(@YES));
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
});
});
- describe(@"and the hmi state is full", ^{
+ context(@"full", ^{
beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelFull;
+ sendNotificationForHMILevel(SDLHMILevelFull);
});
- context(@"and hmi state changes to none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
-
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
});
+ });
+ });
- context(@"and hmi state changes to background", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
+ describe(@"and the app state changes to", ^{
+ context(@"inactive", ^{
+ beforeEach(^{
+ [streamingLifecycleManager.appStateMachine setToState:SDLAppStateInactive fromOldState:nil callEnterTransition:YES];
+ });
- it(@"should close only the video stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
- });
+ it(@"should flag to restart the video stream", ^{
+ expect(@(streamingLifecycleManager.shouldRestartVideoStream)).to(equal(@YES));
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
});
+ });
+ });
+ });
- context(@"and hmi state changes to limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
- });
+ describe(@"and the hmi state is full", ^{
+ beforeEach(^{
+ streamingLifecycleManager.hmiLevel = SDLHMILevelFull;
+ });
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
+ context(@"and hmi state changes to none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
- context(@"and hmi state changes to full", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
- });
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
+ });
+ });
- it(@"should not close either stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
- });
- });
+ context(@"and hmi state changes to background", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelBackground);
+ });
+
+ it(@"should close only the video stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateShuttingDown));
});
});
- describe(@"and both streams are closed", ^{
+ context(@"and hmi state changes to limited", ^{
beforeEach(^{
- [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStopped fromOldState:nil callEnterTransition:NO];
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStopped fromOldState:nil callEnterTransition:NO];
+ sendNotificationForHMILevel(SDLHMILevelLimited);
});
- describe(@"and the hmi state is none", ^{
- beforeEach(^{
- streamingLifecycleManager.hmiLevel = SDLHMILevelNone;
- });
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
- context(@"and hmi state changes to none", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelNone);
- });
+ context(@"and hmi state changes to full", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelFull);
+ });
- it(@"should only start the audio stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
- });
- });
+ it(@"should not close either stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
+ });
+ });
- context(@"and hmi state changes to background", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelBackground);
- });
+ describe(@"and both streams are closed", ^{
+ beforeEach(^{
+ [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStopped fromOldState:nil callEnterTransition:NO];
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStopped fromOldState:nil callEnterTransition:NO];
+ });
- it(@"should only start the audio stream", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
- });
- });
-
- context(@"and hmi state changes to limited", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelLimited);
- });
-
- it(@"should start both streams", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
- });
- });
-
- context(@"and hmi state changes to full", ^{
- beforeEach(^{
- sendNotificationForHMILevel(SDLHMILevelFull);
- });
-
- it(@"should start both streams", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
- });
-
- it(@"should have decided upon the correct preferred format and resolution", ^{
- SDLVideoStreamingFormat *preferredFormat = streamingLifecycleManager.preferredFormats[streamingLifecycleManager.preferredFormatIndex];
- expect(preferredFormat.codec).to(equal(SDLVideoStreamingCodecH264));
- expect(preferredFormat.protocol).to(equal(SDLVideoStreamingProtocolRTP));
-
- SDLImageResolution *preferredResolution = streamingLifecycleManager.preferredResolutions[streamingLifecycleManager.preferredResolutionIndex];
- expect(preferredResolution.resolutionHeight).to(equal(@69));
- expect(preferredResolution.resolutionWidth).to(equal(@42));
- });
- });
+ describe(@"and the hmi state is none", ^{
+ beforeEach(^{
+ streamingLifecycleManager.hmiLevel = SDLHMILevelNone;
+ });
+
+ context(@"and hmi state changes to none", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelNone);
+ });
+
+ it(@"should only start the audio stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
+ });
+
+ context(@"and hmi state changes to background", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelBackground);
+ });
+
+ it(@"should only start the audio stream", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
+ });
+
+ context(@"and hmi state changes to limited", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelLimited);
+ });
+
+ it(@"should start both streams", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
+ });
+ });
+
+ context(@"and hmi state changes to full", ^{
+ beforeEach(^{
+ sendNotificationForHMILevel(SDLHMILevelFull);
+ });
+
+ it(@"should start both streams", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStarting));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStarting));
});
});
});
});
});
- describe(@"after receiving a Video Start ACK", ^{
- __block SDLProtocolHeader *testVideoHeader = nil;
- __block SDLProtocolMessage *testVideoMessage = nil;
- __block SDLControlFramePayloadVideoStartServiceAck *testVideoStartServicePayload = nil;
- __block int64_t testMTU = 789456;
- __block int32_t testVideoHeight = 42;
- __block int32_t testVideoWidth = 32;
- __block SDLVideoStreamingCodec testVideoCodec = SDLVideoStreamingCodecH264;
- __block SDLVideoStreamingProtocol testVideoProtocol = SDLVideoStreamingProtocolRTP;
-
+ describe(@"sending a video capabilities request", ^{
beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:YES];
+ });
- testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testVideoHeader.frameType = SDLFrameTypeSingle;
- testVideoHeader.frameData = SDLFrameInfoStartServiceACK;
- testVideoHeader.encrypted = YES;
- testVideoHeader.serviceType = SDLServiceTypeVideo;
+ it(@"should send out a video capabilities request", ^{
+ expect(testConnectionManager.receivedRequests.lastObject).to(beAnInstanceOf([SDLGetSystemCapability class]));
+
+ SDLGetSystemCapability *getCapability = (SDLGetSystemCapability *)testConnectionManager.receivedRequests.lastObject;
+ expect(getCapability.systemCapabilityType).to(equal(SDLSystemCapabilityTypeVideoStreaming));
});
- context(@"with data", ^{
- beforeEach(^{
- testVideoStartServicePayload = [[SDLControlFramePayloadVideoStartServiceAck alloc] initWithMTU:testMTU height:testVideoHeight width:testVideoWidth protocol:testVideoProtocol codec:testVideoCodec];
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartServicePayload.data];
- [streamingLifecycleManager handleProtocolStartServiceACKMessage:testVideoMessage];
+ describe(@"after sending GetSystemCapabilities", ^{
+ context(@"and receiving an error response", ^{
+ // This happens if the HU doesn't understand GetSystemCapabilities
+ beforeEach(^{
+ SDLGenericResponse *genericResponse = [[SDLGenericResponse alloc] init];
+ genericResponse.resultCode = SDLResultInvalidData;
+
+ [testConnectionManager respondToLastRequestWithResponse:genericResponse];
+ });
+
+ it(@"should have correct format and resolution", ^{
+ expect(streamingLifecycleManager.preferredFormats).to(haveCount(1));
+ expect(streamingLifecycleManager.preferredFormats.firstObject.codec).to(equal(SDLVideoStreamingCodecH264));
+ expect(streamingLifecycleManager.preferredFormats.firstObject.protocol).to(equal(SDLVideoStreamingProtocolRAW));
+
+ expect(streamingLifecycleManager.preferredResolutions).to(haveCount(1));
+ expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionWidth).to(equal(0));
+ expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionHeight).to(equal(0));
+ });
});
- it(@"should have set all the right properties", ^{
- expect([[SDLGlobals sharedGlobals] mtuSizeForServiceType:SDLServiceTypeVideo]).to(equal(testMTU));
- expect(CGSizeEqualToSize(streamingLifecycleManager.screenSize, CGSizeMake(testVideoWidth, testVideoHeight))).to(equal(YES));
- expect(streamingLifecycleManager.videoEncrypted).to(equal(YES));
- expect(streamingLifecycleManager.videoFormat).to(equal([[SDLVideoStreamingFormat alloc] initWithCodec:testVideoCodec protocol:testVideoProtocol]));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ context(@"and receiving a response", ^{
+ __block SDLImageResolution *resolution = nil;
+ __block int32_t maxBitrate = 0;
+ __block NSArray<SDLVideoStreamingFormat *> *testFormats = nil;
+ __block BOOL testHapticsSupported = NO;
+
+ beforeEach(^{
+ SDLGetSystemCapabilityResponse *response = [[SDLGetSystemCapabilityResponse alloc] init];
+ response.success = @YES;
+ response.systemCapability = [[SDLSystemCapability alloc] init];
+ response.systemCapability.systemCapabilityType = SDLSystemCapabilityTypeVideoStreaming;
+
+ resolution = [[SDLImageResolution alloc] initWithWidth:42 height:69];
+ maxBitrate = 12345;
+ testFormats = @[[[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH265 protocol:SDLVideoStreamingProtocolRTMP], [[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRTP]];
+ testHapticsSupported = YES;
+ response.systemCapability.videoStreamingCapability = [[SDLVideoStreamingCapability alloc] initWithPreferredResolution:resolution maxBitrate:maxBitrate supportedFormats:testFormats hapticDataSupported:testHapticsSupported];
+ [testConnectionManager respondToLastRequestWithResponse:response];
+ });
+
+ it(@"should have correct data from the data source", ^{
+ // Correct formats should be retrieved from the data source
+ expect(streamingLifecycleManager.preferredResolutions).to(haveCount(1));
+ expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionWidth).to(equal(resolution.resolutionWidth));
+ expect(streamingLifecycleManager.preferredResolutions.firstObject.resolutionHeight).to(equal(resolution.resolutionHeight));
+
+ expect(streamingLifecycleManager.preferredFormats).to(haveCount(streamingLifecycleManager.supportedFormats.count + 1));
+ expect(streamingLifecycleManager.preferredFormats.firstObject.codec).to(equal(testDataSource.extraFormat.codec));
+ expect(streamingLifecycleManager.preferredFormats.firstObject.protocol).to(equal(testDataSource.extraFormat.protocol));
+
+ // The haptic manager should be enabled
+ expect(streamingLifecycleManager.hapticInterface.enableHapticDataRequests).to(equal(YES));
+ });
+
+ it(@"should have decided upon the correct preferred format and resolution", ^{
+ SDLVideoStreamingFormat *preferredFormat = streamingLifecycleManager.preferredFormats[streamingLifecycleManager.preferredFormatIndex];
+ expect(preferredFormat.codec).to(equal(SDLVideoStreamingCodecH264));
+ expect(preferredFormat.protocol).to(equal(SDLVideoStreamingProtocolRTP));
+
+ SDLImageResolution *preferredResolution = streamingLifecycleManager.preferredResolutions[streamingLifecycleManager.preferredResolutionIndex];
+ expect(preferredResolution.resolutionHeight).to(equal(@69));
+ expect(preferredResolution.resolutionWidth).to(equal(@42));
+ });
});
});
- context(@"with missing data", ^{
+ describe(@"after receiving a Video Start ACK", ^{
+ __block SDLProtocolHeader *testVideoHeader = nil;
+ __block SDLProtocolMessage *testVideoMessage = nil;
+ __block SDLControlFramePayloadVideoStartServiceAck *testVideoStartServicePayload = nil;
+ __block int64_t testMTU = 789456;
+ __block int32_t testVideoHeight = 42;
+ __block int32_t testVideoWidth = 32;
+ __block SDLVideoStreamingCodec testVideoCodec = SDLVideoStreamingCodecH264;
+ __block SDLVideoStreamingProtocol testVideoProtocol = SDLVideoStreamingProtocolRTP;
+
beforeEach(^{
- testVideoStartServicePayload = [[SDLControlFramePayloadVideoStartServiceAck alloc] initWithMTU:testMTU height:testVideoHeight width:testVideoWidth protocol:nil codec:nil];
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartServicePayload.data];
- [streamingLifecycleManager handleProtocolStartServiceACKMessage:testVideoMessage];
- });
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
- it(@"should fall back correctly", ^{
- expect(CGSizeEqualToSize(streamingLifecycleManager.screenSize, CGSizeMake(testVideoWidth, testVideoHeight))).to(equal(YES));
- expect(streamingLifecycleManager.videoFormat).to(equal([[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRAW]));
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testVideoHeader.frameType = SDLFrameTypeSingle;
+ testVideoHeader.frameData = SDLFrameInfoStartServiceACK;
+ testVideoHeader.encrypted = YES;
+ testVideoHeader.serviceType = SDLServiceTypeVideo;
});
- });
- });
- describe(@"after receiving a Video Start NAK", ^{
- __block SDLProtocolHeader *testVideoHeader = nil;
- __block SDLProtocolMessage *testVideoMessage = nil;
- __block SDLControlFramePayloadNak *testVideoStartNakPayload = nil;
+ context(@"with data", ^{
+ beforeEach(^{
+ testVideoStartServicePayload = [[SDLControlFramePayloadVideoStartServiceAck alloc] initWithMTU:testMTU height:testVideoHeight width:testVideoWidth protocol:testVideoProtocol codec:testVideoCodec];
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartServicePayload.data];
+ [streamingLifecycleManager handleProtocolStartServiceACKMessage:testVideoMessage];
+ });
+
+ it(@"should have set all the right properties", ^{
+ expect([[SDLGlobals sharedGlobals] mtuSizeForServiceType:SDLServiceTypeVideo]).to(equal(testMTU));
+ expect(CGSizeEqualToSize(streamingLifecycleManager.screenSize, CGSizeMake(testVideoWidth, testVideoHeight))).to(equal(YES));
+ expect(streamingLifecycleManager.videoEncrypted).to(equal(YES));
+ expect(streamingLifecycleManager.videoFormat).to(equal([[SDLVideoStreamingFormat alloc] initWithCodec:testVideoCodec protocol:testVideoProtocol]));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ context(@"with missing data", ^{
+ beforeEach(^{
+ testVideoStartServicePayload = [[SDLControlFramePayloadVideoStartServiceAck alloc] initWithMTU:testMTU height:testVideoHeight width:testVideoWidth protocol:nil codec:nil];
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartServicePayload.data];
+ [streamingLifecycleManager handleProtocolStartServiceACKMessage:testVideoMessage];
+ });
- testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testVideoHeader.frameType = SDLFrameTypeSingle;
- testVideoHeader.frameData = SDLFrameInfoStartServiceACK;
- testVideoHeader.encrypted = YES;
- testVideoHeader.serviceType = SDLServiceTypeVideo;
+ it(@"should fall back correctly", ^{
+ expect(CGSizeEqualToSize(streamingLifecycleManager.screenSize, CGSizeMake(testVideoWidth, testVideoHeight))).to(equal(YES));
+ expect(streamingLifecycleManager.videoFormat).to(equal([[SDLVideoStreamingFormat alloc] initWithCodec:SDLVideoStreamingCodecH264 protocol:SDLVideoStreamingProtocolRAW]));
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateReady));
+ });
+ });
});
- context(@"with data", ^{
+ describe(@"after receiving a Video Start NAK", ^{
+ __block SDLProtocolHeader *testVideoHeader = nil;
+ __block SDLProtocolMessage *testVideoMessage = nil;
+ __block SDLControlFramePayloadNak *testVideoStartNakPayload = nil;
+
beforeEach(^{
- testVideoStartNakPayload = [[SDLControlFramePayloadNak alloc] initWithRejectedParams:@[[NSString stringWithUTF8String:SDLControlFrameHeightKey], [NSString stringWithUTF8String:SDLControlFrameVideoCodecKey]]];
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartNakPayload.data];
- [streamingLifecycleManager handleProtocolStartServiceNAKMessage:testVideoMessage];
- });
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
- it(@"should have retried with new properties", ^{
- expect(streamingLifecycleManager.preferredResolutionIndex).to(equal(1));
- expect(streamingLifecycleManager.preferredFormatIndex).to(equal(1));
+ testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testVideoHeader.frameType = SDLFrameTypeSingle;
+ testVideoHeader.frameData = SDLFrameInfoStartServiceACK;
+ testVideoHeader.encrypted = YES;
+ testVideoHeader.serviceType = SDLServiceTypeVideo;
});
- });
- context(@"with missing data", ^{
- beforeEach(^{
- testVideoStartNakPayload = [[SDLControlFramePayloadNak alloc] initWithRejectedParams:nil];
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartNakPayload.data];
- [streamingLifecycleManager handleProtocolStartServiceNAKMessage:testVideoMessage];
+ context(@"with data", ^{
+ beforeEach(^{
+ testVideoStartNakPayload = [[SDLControlFramePayloadNak alloc] initWithRejectedParams:@[[NSString stringWithUTF8String:SDLControlFrameHeightKey], [NSString stringWithUTF8String:SDLControlFrameVideoCodecKey]]];
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartNakPayload.data];
+ [streamingLifecycleManager handleProtocolStartServiceNAKMessage:testVideoMessage];
+ });
+
+ it(@"should have retried with new properties", ^{
+ expect(streamingLifecycleManager.preferredResolutionIndex).to(equal(1));
+ expect(streamingLifecycleManager.preferredFormatIndex).to(equal(1));
+ });
});
- it(@"should end the service", ^{
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ context(@"with missing data", ^{
+ beforeEach(^{
+ testVideoStartNakPayload = [[SDLControlFramePayloadNak alloc] initWithRejectedParams:nil];
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:testVideoStartNakPayload.data];
+ [streamingLifecycleManager handleProtocolStartServiceNAKMessage:testVideoMessage];
+ });
+
+ it(@"should end the service", ^{
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
});
});
- });
- describe(@"after receiving a video end ACK", ^{
- __block SDLProtocolHeader *testVideoHeader = nil;
- __block SDLProtocolMessage *testVideoMessage = nil;
+ describe(@"after receiving a video end ACK", ^{
+ __block SDLProtocolHeader *testVideoHeader = nil;
+ __block SDLProtocolMessage *testVideoMessage = nil;
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testVideoHeader.frameType = SDLFrameTypeSingle;
- testVideoHeader.frameData = SDLFrameInfoEndServiceACK;
- testVideoHeader.encrypted = NO;
- testVideoHeader.serviceType = SDLServiceTypeVideo;
+ testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testVideoHeader.frameType = SDLFrameTypeSingle;
+ testVideoHeader.frameData = SDLFrameInfoEndServiceACK;
+ testVideoHeader.encrypted = NO;
+ testVideoHeader.serviceType = SDLServiceTypeVideo;
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:nil];
- [streamingLifecycleManager handleProtocolEndServiceACKMessage:testVideoMessage];
- });
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:nil];
+ [streamingLifecycleManager handleProtocolEndServiceACKMessage:testVideoMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ it(@"should have set all the right properties", ^{
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
});
- });
- describe(@"after receiving a video end NAK", ^{
- __block SDLProtocolHeader *testVideoHeader = nil;
- __block SDLProtocolMessage *testVideoMessage = nil;
+ describe(@"after receiving a video end NAK", ^{
+ __block SDLProtocolHeader *testVideoHeader = nil;
+ __block SDLProtocolMessage *testVideoMessage = nil;
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLVideoStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testVideoHeader.frameType = SDLFrameTypeSingle;
- testVideoHeader.frameData = SDLFrameInfoEndServiceNACK;
- testVideoHeader.encrypted = NO;
- testVideoHeader.serviceType = SDLServiceTypeVideo;
+ testVideoHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testVideoHeader.frameType = SDLFrameTypeSingle;
+ testVideoHeader.frameData = SDLFrameInfoEndServiceNACK;
+ testVideoHeader.encrypted = NO;
+ testVideoHeader.serviceType = SDLServiceTypeVideo;
- testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:nil];
- [streamingLifecycleManager handleProtocolEndServiceNAKMessage:testVideoMessage];
- });
+ testVideoMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testVideoHeader andPayload:nil];
+ [streamingLifecycleManager handleProtocolEndServiceNAKMessage:testVideoMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ it(@"should have set all the right properties", ^{
+ expect(streamingLifecycleManager.currentVideoStreamState).to(equal(SDLVideoStreamStateStopped));
+ });
});
- });
- describe(@"after receiving an Audio Start ACK", ^{
- __block SDLProtocolHeader *testAudioHeader = nil;
- __block SDLProtocolMessage *testAudioMessage = nil;
- __block SDLControlFramePayloadAudioStartServiceAck *testAudioStartServicePayload = nil;
- __block int64_t testMTU = 786579;
+ describe(@"after receiving an Audio Start ACK", ^{
+ __block SDLProtocolHeader *testAudioHeader = nil;
+ __block SDLProtocolMessage *testAudioMessage = nil;
+ __block SDLControlFramePayloadAudioStartServiceAck *testAudioStartServicePayload = nil;
+ __block int64_t testMTU = 786579;
- beforeEach(^{
- [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.audioStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testAudioHeader.frameType = SDLFrameTypeSingle;
- testAudioHeader.frameData = SDLFrameInfoStartServiceACK;
- testAudioHeader.encrypted = YES;
- testAudioHeader.serviceType = SDLServiceTypeAudio;
+ testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testAudioHeader.frameType = SDLFrameTypeSingle;
+ testAudioHeader.frameData = SDLFrameInfoStartServiceACK;
+ testAudioHeader.encrypted = YES;
+ testAudioHeader.serviceType = SDLServiceTypeAudio;
- testAudioStartServicePayload = [[SDLControlFramePayloadAudioStartServiceAck alloc] initWithMTU:testMTU];
- testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:testAudioStartServicePayload.data];
- [streamingLifecycleManager handleProtocolStartServiceACKMessage:testAudioMessage];
- });
+ testAudioStartServicePayload = [[SDLControlFramePayloadAudioStartServiceAck alloc] initWithMTU:testMTU];
+ testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:testAudioStartServicePayload.data];
+ [streamingLifecycleManager handleProtocolStartServiceACKMessage:testAudioMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect([[SDLGlobals sharedGlobals] mtuSizeForServiceType:SDLServiceTypeAudio]).to(equal(testMTU));
- expect(streamingLifecycleManager.audioEncrypted).to(equal(YES));
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ it(@"should have set all the right properties", ^{
+ expect([[SDLGlobals sharedGlobals] mtuSizeForServiceType:SDLServiceTypeAudio]).to(equal(testMTU));
+ expect(streamingLifecycleManager.audioEncrypted).to(equal(YES));
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateReady));
+ });
});
- });
- describe(@"after receiving an Audio Start NAK", ^{
- __block SDLProtocolHeader *testAudioHeader = nil;
- __block SDLProtocolMessage *testAudioMessage = nil;
+ describe(@"after receiving an Audio Start NAK", ^{
+ __block SDLProtocolHeader *testAudioHeader = nil;
+ __block SDLProtocolMessage *testAudioMessage = nil;
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testAudioHeader.frameType = SDLFrameTypeSingle;
- testAudioHeader.frameData = SDLFrameInfoStartServiceNACK;
- testAudioHeader.encrypted = NO;
- testAudioHeader.serviceType = SDLServiceTypeAudio;
+ testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testAudioHeader.frameType = SDLFrameTypeSingle;
+ testAudioHeader.frameData = SDLFrameInfoStartServiceNACK;
+ testAudioHeader.encrypted = NO;
+ testAudioHeader.serviceType = SDLServiceTypeAudio;
- testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
- [streamingLifecycleManager handleProtocolEndServiceACKMessage:testAudioMessage];
- });
+ testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
+ [streamingLifecycleManager handleProtocolEndServiceACKMessage:testAudioMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ it(@"should have set all the right properties", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ });
});
- });
- describe(@"after receiving a audio end ACK", ^{
- __block SDLProtocolHeader *testAudioHeader = nil;
- __block SDLProtocolMessage *testAudioMessage = nil;
+ describe(@"after receiving a audio end ACK", ^{
+ __block SDLProtocolHeader *testAudioHeader = nil;
+ __block SDLProtocolMessage *testAudioMessage = nil;
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testAudioHeader.frameType = SDLFrameTypeSingle;
- testAudioHeader.frameData = SDLFrameInfoEndServiceACK;
- testAudioHeader.encrypted = NO;
- testAudioHeader.serviceType = SDLServiceTypeAudio;
+ testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testAudioHeader.frameType = SDLFrameTypeSingle;
+ testAudioHeader.frameData = SDLFrameInfoEndServiceACK;
+ testAudioHeader.encrypted = NO;
+ testAudioHeader.serviceType = SDLServiceTypeAudio;
- testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
- [streamingLifecycleManager handleProtocolEndServiceACKMessage:testAudioMessage];
- });
+ testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
+ [streamingLifecycleManager handleProtocolEndServiceACKMessage:testAudioMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ it(@"should have set all the right properties", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ });
});
- });
- describe(@"after receiving a audio end NAK", ^{
- __block SDLProtocolHeader *testAudioHeader = nil;
- __block SDLProtocolMessage *testAudioMessage = nil;
+ describe(@"after receiving a audio end NAK", ^{
+ __block SDLProtocolHeader *testAudioHeader = nil;
+ __block SDLProtocolMessage *testAudioMessage = nil;
- beforeEach(^{
- [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
+ beforeEach(^{
+ [streamingLifecycleManager.videoStreamStateMachine setToState:SDLAudioStreamStateStarting fromOldState:nil callEnterTransition:NO];
- testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
- testAudioHeader.frameType = SDLFrameTypeSingle;
- testAudioHeader.frameData = SDLFrameInfoEndServiceNACK;
- testAudioHeader.encrypted = NO;
- testAudioHeader.serviceType = SDLServiceTypeAudio;
+ testAudioHeader = [[SDLV2ProtocolHeader alloc] initWithVersion:5];
+ testAudioHeader.frameType = SDLFrameTypeSingle;
+ testAudioHeader.frameData = SDLFrameInfoEndServiceNACK;
+ testAudioHeader.encrypted = NO;
+ testAudioHeader.serviceType = SDLServiceTypeAudio;
- testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
- [streamingLifecycleManager handleProtocolEndServiceNAKMessage:testAudioMessage];
- });
+ testAudioMessage = [[SDLV2ProtocolMessage alloc] initWithHeader:testAudioHeader andPayload:nil];
+ [streamingLifecycleManager handleProtocolEndServiceNAKMessage:testAudioMessage];
+ });
- it(@"should have set all the right properties", ^{
- expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ it(@"should have set all the right properties", ^{
+ expect(streamingLifecycleManager.currentAudioStreamState).to(equal(SDLAudioStreamStateStopped));
+ });
});
});
});