/*
 * libjingle
 * Copyright 2012, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
| 27 | |
| 28 | #include <string> |
| 29 | |
| 30 | #include "talk/app/webrtc/audiotrack.h" |
| 31 | #include "talk/app/webrtc/mediastream.h" |
| 32 | #include "talk/app/webrtc/mediastreamsignaling.h" |
| 33 | #include "talk/app/webrtc/streamcollection.h" |
| 34 | #include "talk/app/webrtc/test/fakeconstraints.h" |
wu@webrtc.org | cecfd18 | 2013-10-30 05:18:12 +0000 | [diff] [blame] | 35 | #include "talk/app/webrtc/test/fakedatachannelprovider.h" |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 36 | #include "talk/app/webrtc/videotrack.h" |
| 37 | #include "talk/base/gunit.h" |
| 38 | #include "talk/base/scoped_ptr.h" |
| 39 | #include "talk/base/stringutils.h" |
| 40 | #include "talk/base/thread.h" |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 41 | #include "talk/media/base/fakemediaengine.h" |
| 42 | #include "talk/media/devices/fakedevicemanager.h" |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 43 | #include "talk/p2p/base/constants.h" |
| 44 | #include "talk/p2p/base/sessiondescription.h" |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 45 | #include "talk/session/media/channelmanager.h" |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 46 | |
| 47 | static const char kStreams[][8] = {"stream1", "stream2"}; |
| 48 | static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"}; |
| 49 | static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"}; |
| 50 | |
| 51 | using webrtc::AudioTrack; |
| 52 | using webrtc::AudioTrackInterface; |
| 53 | using webrtc::AudioTrackVector; |
| 54 | using webrtc::VideoTrack; |
| 55 | using webrtc::VideoTrackInterface; |
| 56 | using webrtc::VideoTrackVector; |
| 57 | using webrtc::DataChannelInterface; |
| 58 | using webrtc::FakeConstraints; |
| 59 | using webrtc::IceCandidateInterface; |
| 60 | using webrtc::MediaConstraintsInterface; |
| 61 | using webrtc::MediaStreamInterface; |
| 62 | using webrtc::MediaStreamTrackInterface; |
| 63 | using webrtc::SdpParseError; |
| 64 | using webrtc::SessionDescriptionInterface; |
| 65 | using webrtc::StreamCollection; |
| 66 | using webrtc::StreamCollectionInterface; |
| 67 | |
// Reference SDP with a MediaStream with label "stream1", an audio track with
// id "audiotrack0" and a video track with id "videotrack0".
// Uses the old-style ssrc-level mslabel/label attributes (pre-MSID).
static const char kSdpStringWithStream1[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 mslabel:stream1\r\n"
    "a=ssrc:1 label:audiotrack0\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 mslabel:stream1\r\n"
    "a=ssrc:2 label:videotrack0\r\n";

// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
// Each MediaStream has one audio track and one video track.
// This uses MSID.
static const char kSdpStringWith2Stream[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS stream1 stream2\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
    "a=ssrc:3 cname:stream2\r\n"
    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n"
    "a=ssrc:4 cname:stream2\r\n"
    "a=ssrc:4 msid:stream2 videotrack1\r\n";

// Reference SDP without MediaStreams. Msid is not supported.
static const char kSdpStringWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams. Msid is supported.
static const char kSdpStringWithMsidWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams and audio only.
static const char kSdpStringWithoutStreamsAudioOnly[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

// Building blocks used by CreateSessionDescriptionAndReference() to compose
// an SDP blob incrementally: session header, then per-m-line sections, then
// per-track ssrc attribute groups.
static const char kSdpStringInit[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n";

static const char kSdpStringAudio[] =
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

static const char kSdpStringVideo[] =
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

static const char kSdpStringMs1Audio0[] =
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n";

static const char kSdpStringMs1Video0[] =
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n";

static const char kSdpStringMs1Audio1[] =
    "a=ssrc:3 cname:stream1\r\n"
    "a=ssrc:3 msid:stream1 audiotrack1\r\n";

static const char kSdpStringMs1Video1[] =
    "a=ssrc:4 cname:stream1\r\n"
    "a=ssrc:4 msid:stream1 videotrack1\r\n";
| 181 | |
| 182 | // Verifies that |options| contain all tracks in |collection| and that |
| 183 | // the |options| has set the the has_audio and has_video flags correct. |
| 184 | static void VerifyMediaOptions(StreamCollectionInterface* collection, |
| 185 | const cricket::MediaSessionOptions& options) { |
| 186 | if (!collection) { |
| 187 | return; |
| 188 | } |
| 189 | |
| 190 | size_t stream_index = 0; |
| 191 | for (size_t i = 0; i < collection->count(); ++i) { |
| 192 | MediaStreamInterface* stream = collection->at(i); |
| 193 | AudioTrackVector audio_tracks = stream->GetAudioTracks(); |
| 194 | ASSERT_GE(options.streams.size(), stream_index + audio_tracks.size()); |
| 195 | for (size_t j = 0; j < audio_tracks.size(); ++j) { |
| 196 | webrtc::AudioTrackInterface* audio = audio_tracks[j]; |
| 197 | EXPECT_EQ(options.streams[stream_index].sync_label, stream->label()); |
| 198 | EXPECT_EQ(options.streams[stream_index++].id, audio->id()); |
| 199 | EXPECT_TRUE(options.has_audio); |
| 200 | } |
| 201 | VideoTrackVector video_tracks = stream->GetVideoTracks(); |
| 202 | ASSERT_GE(options.streams.size(), stream_index + video_tracks.size()); |
| 203 | for (size_t j = 0; j < video_tracks.size(); ++j) { |
| 204 | webrtc::VideoTrackInterface* video = video_tracks[j]; |
| 205 | EXPECT_EQ(options.streams[stream_index].sync_label, stream->label()); |
| 206 | EXPECT_EQ(options.streams[stream_index++].id, video->id()); |
| 207 | EXPECT_TRUE(options.has_video); |
| 208 | } |
| 209 | } |
| 210 | } |
| 211 | |
| 212 | static bool CompareStreamCollections(StreamCollectionInterface* s1, |
| 213 | StreamCollectionInterface* s2) { |
| 214 | if (s1 == NULL || s2 == NULL || s1->count() != s2->count()) |
| 215 | return false; |
| 216 | |
| 217 | for (size_t i = 0; i != s1->count(); ++i) { |
| 218 | if (s1->at(i)->label() != s2->at(i)->label()) |
| 219 | return false; |
| 220 | webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks(); |
| 221 | webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks(); |
| 222 | webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks(); |
| 223 | webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks(); |
| 224 | |
| 225 | if (audio_tracks1.size() != audio_tracks2.size()) |
| 226 | return false; |
| 227 | for (size_t j = 0; j != audio_tracks1.size(); ++j) { |
| 228 | if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) |
| 229 | return false; |
| 230 | } |
| 231 | if (video_tracks1.size() != video_tracks2.size()) |
| 232 | return false; |
| 233 | for (size_t j = 0; j != video_tracks1.size(); ++j) { |
| 234 | if (video_tracks1[j]->id() != video_tracks2[j]->id()) |
| 235 | return false; |
| 236 | } |
| 237 | } |
| 238 | return true; |
| 239 | } |
| 240 | |
| 241 | class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver { |
| 242 | public: |
| 243 | MockSignalingObserver() |
| 244 | : remote_media_streams_(StreamCollection::Create()) { |
| 245 | } |
| 246 | |
| 247 | virtual ~MockSignalingObserver() { |
| 248 | } |
| 249 | |
| 250 | // New remote stream have been discovered. |
| 251 | virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) { |
| 252 | remote_media_streams_->AddStream(remote_stream); |
| 253 | } |
| 254 | |
| 255 | // Remote stream is no longer available. |
| 256 | virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) { |
| 257 | remote_media_streams_->RemoveStream(remote_stream); |
| 258 | } |
| 259 | |
| 260 | virtual void OnAddDataChannel(DataChannelInterface* data_channel) { |
| 261 | } |
| 262 | |
| 263 | virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream, |
| 264 | AudioTrackInterface* audio_track, |
| 265 | uint32 ssrc) { |
| 266 | AddTrack(&local_audio_tracks_, stream, audio_track, ssrc); |
| 267 | } |
| 268 | |
| 269 | virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream, |
| 270 | VideoTrackInterface* video_track, |
| 271 | uint32 ssrc) { |
| 272 | AddTrack(&local_video_tracks_, stream, video_track, ssrc); |
| 273 | } |
| 274 | |
| 275 | virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream, |
| 276 | AudioTrackInterface* audio_track) { |
| 277 | RemoveTrack(&local_audio_tracks_, stream, audio_track); |
| 278 | } |
| 279 | |
| 280 | virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream, |
| 281 | VideoTrackInterface* video_track) { |
| 282 | RemoveTrack(&local_video_tracks_, stream, video_track); |
| 283 | } |
| 284 | |
| 285 | virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream, |
| 286 | AudioTrackInterface* audio_track, |
| 287 | uint32 ssrc) { |
| 288 | AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc); |
| 289 | } |
| 290 | |
| 291 | virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream, |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 292 | VideoTrackInterface* video_track, |
| 293 | uint32 ssrc) { |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 294 | AddTrack(&remote_video_tracks_, stream, video_track, ssrc); |
| 295 | } |
| 296 | |
| 297 | virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream, |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 298 | AudioTrackInterface* audio_track) { |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 299 | RemoveTrack(&remote_audio_tracks_, stream, audio_track); |
| 300 | } |
| 301 | |
| 302 | virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream, |
| 303 | VideoTrackInterface* video_track) { |
| 304 | RemoveTrack(&remote_video_tracks_, stream, video_track); |
| 305 | } |
| 306 | |
| 307 | virtual void OnRemoveLocalStream(MediaStreamInterface* stream) { |
| 308 | } |
| 309 | |
| 310 | MediaStreamInterface* RemoteStream(const std::string& label) { |
| 311 | return remote_media_streams_->find(label); |
| 312 | } |
| 313 | |
| 314 | StreamCollectionInterface* remote_streams() const { |
| 315 | return remote_media_streams_; |
| 316 | } |
| 317 | |
| 318 | size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); } |
| 319 | |
| 320 | void VerifyRemoteAudioTrack(const std::string& stream_label, |
| 321 | const std::string& track_id, |
| 322 | uint32 ssrc) { |
| 323 | VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc); |
| 324 | } |
| 325 | |
| 326 | size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); } |
| 327 | |
| 328 | void VerifyRemoteVideoTrack(const std::string& stream_label, |
| 329 | const std::string& track_id, |
| 330 | uint32 ssrc) { |
| 331 | VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc); |
| 332 | } |
| 333 | |
| 334 | size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); } |
| 335 | void VerifyLocalAudioTrack(const std::string& stream_label, |
| 336 | const std::string& track_id, |
| 337 | uint32 ssrc) { |
| 338 | VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc); |
| 339 | } |
| 340 | |
| 341 | size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); } |
| 342 | |
| 343 | void VerifyLocalVideoTrack(const std::string& stream_label, |
| 344 | const std::string& track_id, |
| 345 | uint32 ssrc) { |
| 346 | VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc); |
| 347 | } |
| 348 | |
| 349 | private: |
| 350 | struct TrackInfo { |
| 351 | TrackInfo() {} |
| 352 | TrackInfo(const std::string& stream_label, const std::string track_id, |
| 353 | uint32 ssrc) |
| 354 | : stream_label(stream_label), |
| 355 | track_id(track_id), |
| 356 | ssrc(ssrc) { |
| 357 | } |
| 358 | std::string stream_label; |
| 359 | std::string track_id; |
| 360 | uint32 ssrc; |
| 361 | }; |
| 362 | typedef std::map<std::string, TrackInfo> TrackInfos; |
| 363 | |
| 364 | void AddTrack(TrackInfos* track_infos, MediaStreamInterface* stream, |
| 365 | MediaStreamTrackInterface* track, |
| 366 | uint32 ssrc) { |
| 367 | (*track_infos)[track->id()] = TrackInfo(stream->label(), track->id(), |
| 368 | ssrc); |
| 369 | } |
| 370 | |
| 371 | void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream, |
| 372 | MediaStreamTrackInterface* track) { |
| 373 | TrackInfos::iterator it = track_infos->find(track->id()); |
| 374 | ASSERT_TRUE(it != track_infos->end()); |
| 375 | ASSERT_EQ(it->second.stream_label, stream->label()); |
| 376 | track_infos->erase(it); |
| 377 | } |
| 378 | |
| 379 | void VerifyTrack(const TrackInfos& track_infos, |
| 380 | const std::string& stream_label, |
| 381 | const std::string& track_id, |
| 382 | uint32 ssrc) { |
| 383 | TrackInfos::const_iterator it = track_infos.find(track_id); |
| 384 | ASSERT_TRUE(it != track_infos.end()); |
| 385 | EXPECT_EQ(stream_label, it->second.stream_label); |
| 386 | EXPECT_EQ(ssrc, it->second.ssrc); |
| 387 | } |
| 388 | |
| 389 | TrackInfos remote_audio_tracks_; |
| 390 | TrackInfos remote_video_tracks_; |
| 391 | TrackInfos local_audio_tracks_; |
| 392 | TrackInfos local_video_tracks_; |
| 393 | |
| 394 | talk_base::scoped_refptr<StreamCollection> remote_media_streams_; |
| 395 | }; |
| 396 | |
| 397 | class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling { |
| 398 | public: |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 399 | MediaStreamSignalingForTest(MockSignalingObserver* observer, |
| 400 | cricket::ChannelManager* channel_manager) |
| 401 | : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer, |
| 402 | channel_manager) { |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 403 | }; |
| 404 | |
| 405 | using webrtc::MediaStreamSignaling::GetOptionsForOffer; |
| 406 | using webrtc::MediaStreamSignaling::GetOptionsForAnswer; |
| 407 | using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged; |
| 408 | using webrtc::MediaStreamSignaling::remote_streams; |
| 409 | }; |
| 410 | |
| 411 | class MediaStreamSignalingTest: public testing::Test { |
| 412 | protected: |
| 413 | virtual void SetUp() { |
| 414 | observer_.reset(new MockSignalingObserver()); |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 415 | channel_manager_.reset( |
| 416 | new cricket::ChannelManager(new cricket::FakeMediaEngine(), |
| 417 | new cricket::FakeDeviceManager(), |
| 418 | talk_base::Thread::Current())); |
| 419 | signaling_.reset(new MediaStreamSignalingForTest(observer_.get(), |
| 420 | channel_manager_.get())); |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 421 | } |
| 422 | |
| 423 | // Create a collection of streams. |
| 424 | // CreateStreamCollection(1) creates a collection that |
| 425 | // correspond to kSdpString1. |
| 426 | // CreateStreamCollection(2) correspond to kSdpString2. |
| 427 | talk_base::scoped_refptr<StreamCollection> |
| 428 | CreateStreamCollection(int number_of_streams) { |
| 429 | talk_base::scoped_refptr<StreamCollection> local_collection( |
| 430 | StreamCollection::Create()); |
| 431 | |
| 432 | for (int i = 0; i < number_of_streams; ++i) { |
| 433 | talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream( |
| 434 | webrtc::MediaStream::Create(kStreams[i])); |
| 435 | |
| 436 | // Add a local audio track. |
| 437 | talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
| 438 | webrtc::AudioTrack::Create(kAudioTracks[i], NULL)); |
| 439 | stream->AddTrack(audio_track); |
| 440 | |
| 441 | // Add a local video track. |
| 442 | talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| 443 | webrtc::VideoTrack::Create(kVideoTracks[i], NULL)); |
| 444 | stream->AddTrack(video_track); |
| 445 | |
| 446 | local_collection->AddStream(stream); |
| 447 | } |
| 448 | return local_collection; |
| 449 | } |
| 450 | |
| 451 | // This functions Creates a MediaStream with label kStreams[0] and |
| 452 | // |number_of_audio_tracks| and |number_of_video_tracks| tracks and the |
| 453 | // corresponding SessionDescriptionInterface. The SessionDescriptionInterface |
| 454 | // is returned in |desc| and the MediaStream is stored in |
| 455 | // |reference_collection_| |
| 456 | void CreateSessionDescriptionAndReference( |
| 457 | size_t number_of_audio_tracks, |
| 458 | size_t number_of_video_tracks, |
| 459 | SessionDescriptionInterface** desc) { |
| 460 | ASSERT_TRUE(desc != NULL); |
| 461 | ASSERT_LE(number_of_audio_tracks, 2u); |
| 462 | ASSERT_LE(number_of_video_tracks, 2u); |
| 463 | |
| 464 | reference_collection_ = StreamCollection::Create(); |
| 465 | std::string sdp_ms1 = std::string(kSdpStringInit); |
| 466 | |
| 467 | std::string mediastream_label = kStreams[0]; |
| 468 | |
| 469 | talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream( |
| 470 | webrtc::MediaStream::Create(mediastream_label)); |
| 471 | reference_collection_->AddStream(stream); |
| 472 | |
| 473 | if (number_of_audio_tracks > 0) { |
| 474 | sdp_ms1 += std::string(kSdpStringAudio); |
| 475 | sdp_ms1 += std::string(kSdpStringMs1Audio0); |
| 476 | AddAudioTrack(kAudioTracks[0], stream); |
| 477 | } |
| 478 | if (number_of_audio_tracks > 1) { |
| 479 | sdp_ms1 += kSdpStringMs1Audio1; |
| 480 | AddAudioTrack(kAudioTracks[1], stream); |
| 481 | } |
| 482 | |
| 483 | if (number_of_video_tracks > 0) { |
| 484 | sdp_ms1 += std::string(kSdpStringVideo); |
| 485 | sdp_ms1 += std::string(kSdpStringMs1Video0); |
| 486 | AddVideoTrack(kVideoTracks[0], stream); |
| 487 | } |
| 488 | if (number_of_video_tracks > 1) { |
| 489 | sdp_ms1 += kSdpStringMs1Video1; |
| 490 | AddVideoTrack(kVideoTracks[1], stream); |
| 491 | } |
| 492 | |
| 493 | *desc = webrtc::CreateSessionDescription( |
| 494 | SessionDescriptionInterface::kOffer, sdp_ms1, NULL); |
| 495 | } |
| 496 | |
| 497 | void AddAudioTrack(const std::string& track_id, |
| 498 | MediaStreamInterface* stream) { |
| 499 | talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
| 500 | webrtc::AudioTrack::Create(track_id, NULL)); |
| 501 | ASSERT_TRUE(stream->AddTrack(audio_track)); |
| 502 | } |
| 503 | |
| 504 | void AddVideoTrack(const std::string& track_id, |
| 505 | MediaStreamInterface* stream) { |
| 506 | talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| 507 | webrtc::VideoTrack::Create(track_id, NULL)); |
| 508 | ASSERT_TRUE(stream->AddTrack(video_track)); |
| 509 | } |
| 510 | |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 511 | // ChannelManager is used by VideoSource, so it should be released after all |
| 512 | // the video tracks. Put it as the first private variable should ensure that. |
| 513 | talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_; |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 514 | talk_base::scoped_refptr<StreamCollection> reference_collection_; |
| 515 | talk_base::scoped_ptr<MockSignalingObserver> observer_; |
| 516 | talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_; |
| 517 | }; |
| 518 | |
| 519 | // Test that a MediaSessionOptions is created for an offer if |
| 520 | // kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set but no |
| 521 | // MediaStreams are sent. |
| 522 | TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) { |
| 523 | FakeConstraints constraints; |
| 524 | constraints.SetMandatoryReceiveAudio(true); |
| 525 | constraints.SetMandatoryReceiveVideo(true); |
| 526 | cricket::MediaSessionOptions options; |
| 527 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); |
| 528 | EXPECT_TRUE(options.has_audio); |
| 529 | EXPECT_TRUE(options.has_video); |
| 530 | EXPECT_TRUE(options.bundle_enabled); |
| 531 | } |
| 532 | |
| 533 | // Test that a correct MediaSessionOptions is created for an offer if |
| 534 | // kOfferToReceiveAudio constraints is set but no MediaStreams are sent. |
| 535 | TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) { |
| 536 | FakeConstraints constraints; |
| 537 | constraints.SetMandatoryReceiveAudio(true); |
| 538 | cricket::MediaSessionOptions options; |
| 539 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); |
| 540 | EXPECT_TRUE(options.has_audio); |
| 541 | EXPECT_FALSE(options.has_video); |
| 542 | EXPECT_TRUE(options.bundle_enabled); |
| 543 | } |
| 544 | |
| 545 | // Test that a correct MediaSessionOptions is created for an offer if |
| 546 | // no constraints or MediaStreams are sent. |
| 547 | TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) { |
| 548 | cricket::MediaSessionOptions options; |
| 549 | EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); |
| 550 | EXPECT_TRUE(options.has_audio); |
| 551 | EXPECT_FALSE(options.has_video); |
| 552 | EXPECT_TRUE(options.bundle_enabled); |
| 553 | } |
| 554 | |
| 555 | // Test that a correct MediaSessionOptions is created for an offer if |
| 556 | // kOfferToReceiveVideo constraints is set but no MediaStreams are sent. |
| 557 | TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) { |
| 558 | FakeConstraints constraints; |
| 559 | constraints.SetMandatoryReceiveAudio(false); |
| 560 | constraints.SetMandatoryReceiveVideo(true); |
| 561 | cricket::MediaSessionOptions options; |
| 562 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); |
| 563 | EXPECT_FALSE(options.has_audio); |
| 564 | EXPECT_TRUE(options.has_video); |
| 565 | EXPECT_TRUE(options.bundle_enabled); |
| 566 | } |
| 567 | |
| 568 | // Test that a correct MediaSessionOptions is created for an offer if |
| 569 | // kUseRtpMux constraints is set to false. |
| 570 | TEST_F(MediaStreamSignalingTest, |
| 571 | GetMediaSessionOptionsForOfferWithBundleDisabled) { |
| 572 | FakeConstraints constraints; |
| 573 | constraints.SetMandatoryReceiveAudio(true); |
| 574 | constraints.SetMandatoryReceiveVideo(true); |
| 575 | constraints.SetMandatoryUseRtpMux(false); |
| 576 | cricket::MediaSessionOptions options; |
| 577 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); |
| 578 | EXPECT_TRUE(options.has_audio); |
| 579 | EXPECT_TRUE(options.has_video); |
| 580 | EXPECT_FALSE(options.bundle_enabled); |
| 581 | } |
| 582 | |
| 583 | // Test that a correct MediaSessionOptions is created to restart ice if |
| 584 | // kIceRestart constraints is set. It also tests that subsequent |
| 585 | // MediaSessionOptions don't have |transport_options.ice_restart| set. |
| 586 | TEST_F(MediaStreamSignalingTest, |
| 587 | GetMediaSessionOptionsForOfferWithIceRestart) { |
| 588 | FakeConstraints constraints; |
| 589 | constraints.SetMandatoryIceRestart(true); |
| 590 | cricket::MediaSessionOptions options; |
| 591 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options)); |
| 592 | EXPECT_TRUE(options.transport_options.ice_restart); |
| 593 | |
| 594 | EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); |
| 595 | EXPECT_FALSE(options.transport_options.ice_restart); |
| 596 | } |
| 597 | |
| 598 | // Test that GetMediaSessionOptionsForOffer and GetOptionsForAnswer work as |
| 599 | // expected if unknown constraints are used. |
| 600 | TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsWithBadConstraints) { |
| 601 | FakeConstraints mandatory; |
| 602 | mandatory.AddMandatory("bad_key", "bad_value"); |
| 603 | cricket::MediaSessionOptions options; |
| 604 | EXPECT_FALSE(signaling_->GetOptionsForOffer(&mandatory, &options)); |
| 605 | EXPECT_FALSE(signaling_->GetOptionsForAnswer(&mandatory, &options)); |
| 606 | |
| 607 | FakeConstraints optional; |
| 608 | optional.AddOptional("bad_key", "bad_value"); |
| 609 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&optional, &options)); |
| 610 | EXPECT_TRUE(signaling_->GetOptionsForAnswer(&optional, &options)); |
| 611 | } |
| 612 | |
| 613 | // Test that a correct MediaSessionOptions are created for an offer if |
| 614 | // a MediaStream is sent and later updated with a new track. |
| 615 | // MediaConstraints are not used. |
| 616 | TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) { |
| 617 | talk_base::scoped_refptr<StreamCollection> local_streams( |
| 618 | CreateStreamCollection(1)); |
| 619 | MediaStreamInterface* local_stream = local_streams->at(0); |
| 620 | EXPECT_TRUE(signaling_->AddLocalStream(local_stream)); |
| 621 | cricket::MediaSessionOptions options; |
| 622 | EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); |
| 623 | VerifyMediaOptions(local_streams, options); |
| 624 | |
| 625 | cricket::MediaSessionOptions updated_options; |
| 626 | local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL)); |
| 627 | EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options)); |
| 628 | VerifyMediaOptions(local_streams, options); |
| 629 | } |
| 630 | |
| 631 | // Test that the MediaConstraints in an answer don't affect if audio and video |
| 632 | // is offered in an offer but that if kOfferToReceiveAudio or |
| 633 | // kOfferToReceiveVideo constraints are true in an offer, the media type will be |
| 634 | // included in subsequent answers. |
| 635 | TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) { |
| 636 | FakeConstraints answer_c; |
| 637 | answer_c.SetMandatoryReceiveAudio(true); |
| 638 | answer_c.SetMandatoryReceiveVideo(true); |
| 639 | |
| 640 | cricket::MediaSessionOptions answer_options; |
| 641 | EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options)); |
| 642 | EXPECT_TRUE(answer_options.has_audio); |
| 643 | EXPECT_TRUE(answer_options.has_video); |
| 644 | |
| 645 | FakeConstraints offer_c; |
| 646 | offer_c.SetMandatoryReceiveAudio(false); |
| 647 | offer_c.SetMandatoryReceiveVideo(false); |
| 648 | |
| 649 | cricket::MediaSessionOptions offer_options; |
| 650 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&offer_c, &offer_options)); |
| 651 | EXPECT_FALSE(offer_options.has_audio); |
| 652 | EXPECT_FALSE(offer_options.has_video); |
| 653 | |
| 654 | FakeConstraints updated_offer_c; |
| 655 | updated_offer_c.SetMandatoryReceiveAudio(true); |
| 656 | updated_offer_c.SetMandatoryReceiveVideo(true); |
| 657 | |
| 658 | cricket::MediaSessionOptions updated_offer_options; |
| 659 | EXPECT_TRUE(signaling_->GetOptionsForOffer(&updated_offer_c, |
| 660 | &updated_offer_options)); |
| 661 | EXPECT_TRUE(updated_offer_options.has_audio); |
| 662 | EXPECT_TRUE(updated_offer_options.has_video); |
| 663 | |
| 664 | // Since an offer has been created with both audio and video, subsequent |
| 665 | // offers and answers should contain both audio and video. |
| 666 | // Answers will only contain the media types that exist in the offer |
| 667 | // regardless of the value of |updated_answer_options.has_audio| and |
| 668 | // |updated_answer_options.has_video|. |
| 669 | FakeConstraints updated_answer_c; |
| 670 | answer_c.SetMandatoryReceiveAudio(false); |
| 671 | answer_c.SetMandatoryReceiveVideo(false); |
| 672 | |
| 673 | cricket::MediaSessionOptions updated_answer_options; |
| 674 | EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c, |
| 675 | &updated_answer_options)); |
| 676 | EXPECT_TRUE(updated_answer_options.has_audio); |
| 677 | EXPECT_TRUE(updated_answer_options.has_video); |
| 678 | |
| 679 | EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, |
| 680 | &updated_offer_options)); |
| 681 | EXPECT_TRUE(updated_offer_options.has_audio); |
| 682 | EXPECT_TRUE(updated_offer_options.has_video); |
| 683 | } |
| 684 | |
| 685 | // This test verifies that the remote MediaStreams corresponding to a received |
| 686 | // SDP string is created. In this test the two separate MediaStreams are |
| 687 | // signaled. |
| 688 | TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) { |
| 689 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 690 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 691 | kSdpStringWithStream1, NULL)); |
| 692 | EXPECT_TRUE(desc != NULL); |
| 693 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 694 | |
| 695 | talk_base::scoped_refptr<StreamCollection> reference( |
| 696 | CreateStreamCollection(1)); |
| 697 | EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 698 | reference.get())); |
| 699 | EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 700 | reference.get())); |
| 701 | EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks()); |
| 702 | observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 703 | EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks()); |
| 704 | observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); |
wu@webrtc.org | 967bfff | 2013-09-19 05:49:50 +0000 | [diff] [blame] | 705 | ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 706 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 707 | EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL); |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 708 | |
| 709 | // Create a session description based on another SDP with another |
| 710 | // MediaStream. |
| 711 | talk_base::scoped_ptr<SessionDescriptionInterface> update_desc( |
| 712 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 713 | kSdpStringWith2Stream, NULL)); |
| 714 | EXPECT_TRUE(update_desc != NULL); |
| 715 | signaling_->OnRemoteDescriptionChanged(update_desc.get()); |
| 716 | |
| 717 | talk_base::scoped_refptr<StreamCollection> reference2( |
| 718 | CreateStreamCollection(2)); |
| 719 | EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 720 | reference2.get())); |
| 721 | EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 722 | reference2.get())); |
| 723 | |
| 724 | EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks()); |
| 725 | observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 726 | observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3); |
| 727 | EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks()); |
| 728 | observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 729 | observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4); |
| 730 | } |
| 731 | |
| 732 | // This test verifies that the remote MediaStreams corresponding to a received |
| 733 | // SDP string is created. In this test the same remote MediaStream is signaled |
| 734 | // but MediaStream tracks are added and removed. |
| 735 | TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) { |
| 736 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1; |
| 737 | CreateSessionDescriptionAndReference(1, 1, desc_ms1.use()); |
| 738 | signaling_->OnRemoteDescriptionChanged(desc_ms1.get()); |
| 739 | EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 740 | reference_collection_)); |
| 741 | |
| 742 | // Add extra audio and video tracks to the same MediaStream. |
| 743 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks; |
| 744 | CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use()); |
| 745 | signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get()); |
| 746 | EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 747 | reference_collection_)); |
| 748 | EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 749 | reference_collection_)); |
| 750 | |
| 751 | // Remove the extra audio and video tracks again. |
henrike@webrtc.org | 723d683 | 2013-07-12 16:04:50 +0000 | [diff] [blame] | 752 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms2; |
| 753 | CreateSessionDescriptionAndReference(1, 1, desc_ms2.use()); |
| 754 | signaling_->OnRemoteDescriptionChanged(desc_ms2.get()); |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 755 | EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 756 | reference_collection_)); |
| 757 | EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 758 | reference_collection_)); |
| 759 | } |
| 760 | |
| 761 | // This test that remote tracks are ended if a |
| 762 | // local session description is set that rejects the media content type. |
| 763 | TEST_F(MediaStreamSignalingTest, RejectMediaContent) { |
| 764 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 765 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 766 | kSdpStringWithStream1, NULL)); |
| 767 | EXPECT_TRUE(desc != NULL); |
| 768 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 769 | |
| 770 | ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 771 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 772 | ASSERT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 773 | ASSERT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 774 | |
| 775 | talk_base::scoped_refptr<webrtc::VideoTrackInterface> remote_video = |
| 776 | remote_stream->GetVideoTracks()[0]; |
| 777 | EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state()); |
| 778 | talk_base::scoped_refptr<webrtc::AudioTrackInterface> remote_audio = |
| 779 | remote_stream->GetAudioTracks()[0]; |
| 780 | EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state()); |
| 781 | |
| 782 | cricket::ContentInfo* video_info = |
| 783 | desc->description()->GetContentByName("video"); |
| 784 | ASSERT_TRUE(video_info != NULL); |
| 785 | video_info->rejected = true; |
| 786 | signaling_->OnLocalDescriptionChanged(desc.get()); |
| 787 | EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state()); |
| 788 | EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state()); |
| 789 | |
| 790 | cricket::ContentInfo* audio_info = |
| 791 | desc->description()->GetContentByName("audio"); |
| 792 | ASSERT_TRUE(audio_info != NULL); |
| 793 | audio_info->rejected = true; |
| 794 | signaling_->OnLocalDescriptionChanged(desc.get()); |
| 795 | EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state()); |
| 796 | } |
| 797 | |
henrike@webrtc.org | 7666db7 | 2013-08-22 14:45:42 +0000 | [diff] [blame] | 798 | // This test that it won't crash if the remote track as been removed outside |
| 799 | // of MediaStreamSignaling and then MediaStreamSignaling tries to reject |
| 800 | // this track. |
| 801 | TEST_F(MediaStreamSignalingTest, RemoveTrackThenRejectMediaContent) { |
| 802 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 803 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 804 | kSdpStringWithStream1, NULL)); |
| 805 | EXPECT_TRUE(desc != NULL); |
| 806 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 807 | |
| 808 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 809 | remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); |
| 810 | remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); |
| 811 | |
| 812 | cricket::ContentInfo* video_info = |
| 813 | desc->description()->GetContentByName("video"); |
| 814 | video_info->rejected = true; |
| 815 | signaling_->OnLocalDescriptionChanged(desc.get()); |
| 816 | |
| 817 | cricket::ContentInfo* audio_info = |
| 818 | desc->description()->GetContentByName("audio"); |
| 819 | audio_info->rejected = true; |
| 820 | signaling_->OnLocalDescriptionChanged(desc.get()); |
| 821 | |
| 822 | // No crash is a pass. |
| 823 | } |
| 824 | |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 825 | // This tests that a default MediaStream is created if a remote session |
| 826 | // description doesn't contain any streams and no MSID support. |
| 827 | // It also tests that the default stream is updated if a video m-line is added |
| 828 | // in a subsequent session description. |
| 829 | TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) { |
| 830 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only( |
| 831 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 832 | kSdpStringWithoutStreamsAudioOnly, |
| 833 | NULL)); |
| 834 | ASSERT_TRUE(desc_audio_only != NULL); |
| 835 | signaling_->OnRemoteDescriptionChanged(desc_audio_only.get()); |
| 836 | |
| 837 | EXPECT_EQ(1u, signaling_->remote_streams()->count()); |
| 838 | ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 839 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 840 | |
| 841 | EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 842 | EXPECT_EQ(0u, remote_stream->GetVideoTracks().size()); |
| 843 | EXPECT_EQ("default", remote_stream->label()); |
| 844 | |
| 845 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 846 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 847 | kSdpStringWithoutStreams, NULL)); |
| 848 | ASSERT_TRUE(desc != NULL); |
| 849 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 850 | EXPECT_EQ(1u, signaling_->remote_streams()->count()); |
| 851 | ASSERT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 852 | EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id()); |
| 853 | ASSERT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 854 | EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id()); |
| 855 | observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0); |
| 856 | observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0); |
| 857 | } |
| 858 | |
henrike@webrtc.org | 7666db7 | 2013-08-22 14:45:42 +0000 | [diff] [blame] | 859 | // This tests that it won't crash when MediaStreamSignaling tries to remove |
| 860 | // a remote track that as already been removed from the mediastream. |
| 861 | TEST_F(MediaStreamSignalingTest, RemoveAlreadyGoneRemoteStream) { |
| 862 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only( |
| 863 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 864 | kSdpStringWithoutStreams, |
| 865 | NULL)); |
| 866 | ASSERT_TRUE(desc_audio_only != NULL); |
| 867 | signaling_->OnRemoteDescriptionChanged(desc_audio_only.get()); |
| 868 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 869 | remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); |
| 870 | remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); |
| 871 | |
| 872 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 873 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 874 | kSdpStringWithoutStreams, NULL)); |
| 875 | ASSERT_TRUE(desc != NULL); |
| 876 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 877 | |
| 878 | // No crash is a pass. |
| 879 | } |
| 880 | |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 881 | // This tests that a default MediaStream is created if the remote session |
| 882 | // description doesn't contain any streams and don't contain an indication if |
| 883 | // MSID is supported. |
| 884 | TEST_F(MediaStreamSignalingTest, |
| 885 | SdpWithoutMsidAndStreamsCreatesDefaultStream) { |
| 886 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 887 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 888 | kSdpStringWithoutStreams, |
| 889 | NULL)); |
| 890 | ASSERT_TRUE(desc != NULL); |
| 891 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 892 | |
| 893 | ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 894 | MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 895 | EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 896 | EXPECT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 897 | } |
| 898 | |
| 899 | // This tests that a default MediaStream is not created if the remote session |
| 900 | // description doesn't contain any streams but does support MSID. |
| 901 | TEST_F(MediaStreamSignalingTest, SdpWitMsidDontCreatesDefaultStream) { |
| 902 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams( |
| 903 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 904 | kSdpStringWithMsidWithoutStreams, |
| 905 | NULL)); |
| 906 | signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get()); |
| 907 | EXPECT_EQ(0u, observer_->remote_streams()->count()); |
| 908 | } |
| 909 | |
| 910 | // This test that a default MediaStream is not created if a remote session |
| 911 | // description is updated to not have any MediaStreams. |
| 912 | TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) { |
| 913 | talk_base::scoped_ptr<SessionDescriptionInterface> desc( |
| 914 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 915 | kSdpStringWithStream1, |
| 916 | NULL)); |
| 917 | ASSERT_TRUE(desc != NULL); |
| 918 | signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 919 | talk_base::scoped_refptr<StreamCollection> reference( |
| 920 | CreateStreamCollection(1)); |
| 921 | EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 922 | reference.get())); |
| 923 | |
| 924 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_without_streams( |
| 925 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 926 | kSdpStringWithoutStreams, |
| 927 | NULL)); |
| 928 | signaling_->OnRemoteDescriptionChanged(desc_without_streams.get()); |
| 929 | EXPECT_EQ(0u, observer_->remote_streams()->count()); |
| 930 | } |
| 931 | |
| 932 | // This test that the correct MediaStreamSignalingObserver methods are called |
| 933 | // when MediaStreamSignaling::OnLocalDescriptionChanged is called with an |
| 934 | // updated local session description. |
| 935 | TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) { |
| 936 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_1; |
| 937 | CreateSessionDescriptionAndReference(2, 2, desc_1.use()); |
| 938 | |
| 939 | signaling_->AddLocalStream(reference_collection_->at(0)); |
| 940 | signaling_->OnLocalDescriptionChanged(desc_1.get()); |
| 941 | EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); |
| 942 | EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); |
| 943 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 944 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 945 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); |
| 946 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); |
| 947 | |
| 948 | // Remove an audio and video track. |
| 949 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_2; |
| 950 | CreateSessionDescriptionAndReference(1, 1, desc_2.use()); |
| 951 | signaling_->OnLocalDescriptionChanged(desc_2.get()); |
| 952 | EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 953 | EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 954 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 955 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 956 | } |
| 957 | |
| 958 | // This test that the correct MediaStreamSignalingObserver methods are called |
| 959 | // when MediaStreamSignaling::AddLocalStream is called after |
| 960 | // MediaStreamSignaling::OnLocalDescriptionChanged is called. |
| 961 | TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) { |
| 962 | talk_base::scoped_ptr<SessionDescriptionInterface> desc_1; |
| 963 | CreateSessionDescriptionAndReference(2, 2, desc_1.use()); |
| 964 | |
| 965 | signaling_->OnLocalDescriptionChanged(desc_1.get()); |
| 966 | EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks()); |
| 967 | EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks()); |
| 968 | |
| 969 | signaling_->AddLocalStream(reference_collection_->at(0)); |
| 970 | EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); |
| 971 | EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); |
| 972 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 973 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 974 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); |
| 975 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); |
| 976 | } |
| 977 | |
| 978 | // This test that the correct MediaStreamSignalingObserver methods are called |
| 979 | // if the ssrc on a local track is changed when |
| 980 | // MediaStreamSignaling::OnLocalDescriptionChanged is called. |
| 981 | TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) { |
| 982 | talk_base::scoped_ptr<SessionDescriptionInterface> desc; |
| 983 | CreateSessionDescriptionAndReference(1, 1, desc.use()); |
| 984 | |
| 985 | signaling_->AddLocalStream(reference_collection_->at(0)); |
| 986 | signaling_->OnLocalDescriptionChanged(desc.get()); |
| 987 | EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 988 | EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 989 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 990 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 991 | |
| 992 | // Change the ssrc of the audio and video track. |
| 993 | std::string sdp; |
| 994 | desc->ToString(&sdp); |
| 995 | std::string ssrc_org = "a=ssrc:1"; |
| 996 | std::string ssrc_to = "a=ssrc:97"; |
| 997 | talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), |
| 998 | ssrc_to.c_str(), ssrc_to.length(), |
| 999 | &sdp); |
| 1000 | ssrc_org = "a=ssrc:2"; |
| 1001 | ssrc_to = "a=ssrc:98"; |
| 1002 | talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), |
| 1003 | ssrc_to.c_str(), ssrc_to.length(), |
| 1004 | &sdp); |
| 1005 | talk_base::scoped_ptr<SessionDescriptionInterface> updated_desc( |
| 1006 | webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1007 | sdp, NULL)); |
| 1008 | |
| 1009 | signaling_->OnLocalDescriptionChanged(updated_desc.get()); |
| 1010 | EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1011 | EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1012 | observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97); |
| 1013 | observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98); |
| 1014 | } |
| 1015 | |
wu@webrtc.org | cecfd18 | 2013-10-30 05:18:12 +0000 | [diff] [blame] | 1016 | // Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for |
| 1017 | // SSL_SERVER. |
| 1018 | TEST_F(MediaStreamSignalingTest, SctpIdAllocationBasedOnRole) { |
| 1019 | int id; |
| 1020 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &id)); |
| 1021 | EXPECT_EQ(1, id); |
| 1022 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &id)); |
| 1023 | EXPECT_EQ(0, id); |
| 1024 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &id)); |
| 1025 | EXPECT_EQ(3, id); |
| 1026 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &id)); |
| 1027 | EXPECT_EQ(2, id); |
| 1028 | } |
henrike@webrtc.org | 28e2075 | 2013-07-10 00:45:36 +0000 | [diff] [blame] | 1029 | |
wu@webrtc.org | cecfd18 | 2013-10-30 05:18:12 +0000 | [diff] [blame] | 1030 | // Verifies that SCTP ids of existing DataChannels are not reused. |
| 1031 | TEST_F(MediaStreamSignalingTest, SctpIdAllocationNoReuse) { |
| 1032 | talk_base::scoped_ptr<FakeDataChannelProvider> provider( |
| 1033 | new FakeDataChannelProvider()); |
| 1034 | // Creates a DataChannel with id 1. |
| 1035 | webrtc::DataChannelInit config; |
| 1036 | config.id = 1; |
| 1037 | talk_base::scoped_refptr<webrtc::DataChannel> data_channel( |
| 1038 | webrtc::DataChannel::Create( |
| 1039 | provider.get(), cricket::DCT_SCTP, "a", &config)); |
| 1040 | ASSERT_TRUE(data_channel.get() != NULL); |
| 1041 | ASSERT_TRUE(signaling_->AddDataChannel(data_channel.get())); |
| 1042 | |
| 1043 | int new_id; |
| 1044 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &new_id)); |
| 1045 | EXPECT_NE(config.id, new_id); |
| 1046 | |
| 1047 | // Creates a DataChannel with id 0. |
| 1048 | config.id = 0; |
| 1049 | data_channel = webrtc::DataChannel::Create( |
| 1050 | provider.get(), cricket::DCT_SCTP, "b", &config); |
| 1051 | ASSERT_TRUE(data_channel.get() != NULL); |
| 1052 | ASSERT_TRUE(signaling_->AddDataChannel(data_channel.get())); |
| 1053 | ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &new_id)); |
| 1054 | EXPECT_NE(config.id, new_id); |
| 1055 | } |