gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. |
| 3 | * |
| 4 | * Use of this source code is governed by a BSD-style license |
| 5 | * that can be found in the LICENSE file in the root of the source |
| 6 | * tree. An additional intellectual property rights grant can be found |
| 7 | * in the file PATENTS. All contributing project authors may |
| 8 | * be found in the AUTHORS file in the root of the source tree. |
| 9 | */ |
| 10 | |
Mirko Bonadei | 92ea95e | 2017-09-15 06:47:31 +0200 | [diff] [blame] | 11 | #include "examples/unityplugin/simple_peer_connection.h" |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 12 | |
| 13 | #include <utility> |
| 14 | |
Mirko Bonadei | 92ea95e | 2017-09-15 06:47:31 +0200 | [diff] [blame] | 15 | #include "api/test/fakeconstraints.h" |
| 16 | #include "api/videosourceproxy.h" |
| 17 | #include "media/engine/webrtcvideocapturerfactory.h" |
| 18 | #include "modules/video_capture/video_capture_factory.h" |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 19 | |
qiangchen | 42f96d5 | 2017-08-08 17:08:03 -0700 | [diff] [blame] | 20 | #if defined(WEBRTC_ANDROID) |
Mirko Bonadei | 92ea95e | 2017-09-15 06:47:31 +0200 | [diff] [blame] | 21 | #include "examples/unityplugin/classreferenceholder.h" |
| 22 | #include "sdk/android/src/jni/androidvideotracksource.h" |
| 23 | #include "sdk/android/src/jni/jni_helpers.h" |
qiangchen | 42f96d5 | 2017-08-08 17:08:03 -0700 | [diff] [blame] | 24 | #endif |
| 25 | |
// Names used for media stream labels.
const char kAudioLabel[] = "audio_label";    // Label for the local audio track.
const char kVideoLabel[] = "video_label";    // Label for the local video track.
const char kStreamLabel[] = "stream_label";  // Label for the local media stream.
| 30 | |
| 31 | namespace { |
| 32 | static int g_peer_count = 0; |
| 33 | static std::unique_ptr<rtc::Thread> g_worker_thread; |
| 34 | static std::unique_ptr<rtc::Thread> g_signaling_thread; |
| 35 | static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> |
| 36 | g_peer_connection_factory; |
qiangchen | 42f96d5 | 2017-08-08 17:08:03 -0700 | [diff] [blame] | 37 | #if defined(WEBRTC_ANDROID) |
| 38 | // Android case: the video track does not own the capturer, and it |
| 39 | // relies on the app to dispose the capturer when the peerconnection |
| 40 | // shuts down. |
| 41 | static jobject g_camera = nullptr; |
| 42 | #endif |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 43 | |
| 44 | std::string GetEnvVarOrDefault(const char* env_var_name, |
| 45 | const char* default_value) { |
| 46 | std::string value; |
| 47 | const char* env_var = getenv(env_var_name); |
| 48 | if (env_var) |
| 49 | value = env_var; |
| 50 | |
| 51 | if (value.empty()) |
| 52 | value = default_value; |
| 53 | |
| 54 | return value; |
| 55 | } |
| 56 | |
| 57 | std::string GetPeerConnectionString() { |
| 58 | return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302"); |
| 59 | } |
| 60 | |
| 61 | class DummySetSessionDescriptionObserver |
| 62 | : public webrtc::SetSessionDescriptionObserver { |
| 63 | public: |
| 64 | static DummySetSessionDescriptionObserver* Create() { |
| 65 | return new rtc::RefCountedObject<DummySetSessionDescriptionObserver>(); |
| 66 | } |
| 67 | virtual void OnSuccess() { LOG(INFO) << __FUNCTION__; } |
| 68 | virtual void OnFailure(const std::string& error) { |
| 69 | LOG(INFO) << __FUNCTION__ << " " << error; |
| 70 | } |
| 71 | |
| 72 | protected: |
| 73 | DummySetSessionDescriptionObserver() {} |
| 74 | ~DummySetSessionDescriptionObserver() {} |
| 75 | }; |
| 76 | |
| 77 | } // namespace |
| 78 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 79 | bool SimplePeerConnection::InitializePeerConnection(const char** turn_urls, |
| 80 | const int no_of_urls, |
| 81 | const char* username, |
| 82 | const char* credential, |
| 83 | bool is_receiver) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 84 | RTC_DCHECK(peer_connection_.get() == nullptr); |
| 85 | |
| 86 | if (g_peer_connection_factory == nullptr) { |
| 87 | g_worker_thread.reset(new rtc::Thread()); |
| 88 | g_worker_thread->Start(); |
| 89 | g_signaling_thread.reset(new rtc::Thread()); |
| 90 | g_signaling_thread->Start(); |
| 91 | |
| 92 | g_peer_connection_factory = webrtc::CreatePeerConnectionFactory( |
| 93 | g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(), |
| 94 | nullptr, nullptr, nullptr); |
| 95 | } |
| 96 | if (!g_peer_connection_factory.get()) { |
| 97 | DeletePeerConnection(); |
| 98 | return false; |
| 99 | } |
| 100 | |
| 101 | g_peer_count++; |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 102 | if (!CreatePeerConnection(turn_urls, no_of_urls, username, credential, |
| 103 | is_receiver)) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 104 | DeletePeerConnection(); |
| 105 | return false; |
| 106 | } |
| 107 | return peer_connection_.get() != nullptr; |
| 108 | } |
| 109 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 110 | bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls, |
| 111 | const int no_of_urls, |
| 112 | const char* username, |
| 113 | const char* credential, |
| 114 | bool is_receiver) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 115 | RTC_DCHECK(g_peer_connection_factory.get() != nullptr); |
| 116 | RTC_DCHECK(peer_connection_.get() == nullptr); |
| 117 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 118 | local_video_observer_.reset(new VideoObserver()); |
| 119 | remote_video_observer_.reset(new VideoObserver()); |
| 120 | |
| 121 | // Add the turn server. |
| 122 | if (turn_urls != nullptr) { |
| 123 | if (no_of_urls > 0) { |
| 124 | webrtc::PeerConnectionInterface::IceServer turn_server; |
| 125 | for (int i = 0; i < no_of_urls; i++) { |
| 126 | std::string url(turn_urls[i]); |
| 127 | if (url.length() > 0) |
| 128 | turn_server.urls.push_back(turn_urls[i]); |
| 129 | } |
| 130 | |
| 131 | std::string user_name(username); |
| 132 | if (user_name.length() > 0) |
| 133 | turn_server.username = username; |
| 134 | |
| 135 | std::string password(credential); |
| 136 | if (password.length() > 0) |
| 137 | turn_server.password = credential; |
| 138 | |
| 139 | config_.servers.push_back(turn_server); |
| 140 | } |
| 141 | } |
| 142 | |
| 143 | // Add the stun server. |
| 144 | webrtc::PeerConnectionInterface::IceServer stun_server; |
| 145 | stun_server.uri = GetPeerConnectionString(); |
| 146 | config_.servers.push_back(stun_server); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 147 | |
| 148 | webrtc::FakeConstraints constraints; |
| 149 | constraints.SetAllowDtlsSctpDataChannels(); |
| 150 | |
| 151 | if (is_receiver) { |
| 152 | constraints.SetMandatoryReceiveAudio(true); |
| 153 | constraints.SetMandatoryReceiveVideo(true); |
| 154 | } |
| 155 | |
| 156 | peer_connection_ = g_peer_connection_factory->CreatePeerConnection( |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 157 | config_, &constraints, nullptr, nullptr, this); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 158 | |
| 159 | return peer_connection_.get() != nullptr; |
| 160 | } |
| 161 | |
// Tears down this instance's connection state and releases the shared
// factory/threads when the last SimplePeerConnection goes away.
void SimplePeerConnection::DeletePeerConnection() {
  g_peer_count--;

#if defined(WEBRTC_ANDROID)
  if (g_camera) {
    // The Java-side camera is owned by the app layer; ask UnityUtility to
    // stop it, then drop our global reference so it can be collected.
    JNIEnv* env = webrtc::jni::GetEnv();
    jclass pc_factory_class =
        unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
    jmethodID stop_camera_method = webrtc::jni::GetStaticMethodID(
        env, pc_factory_class, "StopCamera", "(Lorg/webrtc/VideoCapturer;)V");

    env->CallStaticVoidMethod(pc_factory_class, stop_camera_method, g_camera);
    CHECK_EXCEPTION(env);

    g_camera = nullptr;
  }
#endif

  CloseDataChannel();
  peer_connection_ = nullptr;
  active_streams_.clear();

  // Last instance out: free the shared factory and stop the global threads.
  if (g_peer_count == 0) {
    g_peer_connection_factory = nullptr;
    g_signaling_thread.reset();
    g_worker_thread.reset();
  }
}
| 190 | |
| 191 | bool SimplePeerConnection::CreateOffer() { |
| 192 | if (!peer_connection_.get()) |
| 193 | return false; |
| 194 | |
| 195 | peer_connection_->CreateOffer(this, nullptr); |
| 196 | return true; |
| 197 | } |
| 198 | |
| 199 | bool SimplePeerConnection::CreateAnswer() { |
| 200 | if (!peer_connection_.get()) |
| 201 | return false; |
| 202 | |
| 203 | peer_connection_->CreateAnswer(this, nullptr); |
| 204 | return true; |
| 205 | } |
| 206 | |
| 207 | void SimplePeerConnection::OnSuccess( |
| 208 | webrtc::SessionDescriptionInterface* desc) { |
| 209 | peer_connection_->SetLocalDescription( |
| 210 | DummySetSessionDescriptionObserver::Create(), desc); |
| 211 | |
| 212 | std::string sdp; |
| 213 | desc->ToString(&sdp); |
| 214 | |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 215 | if (OnLocalSdpReady) |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 216 | OnLocalSdpReady(desc->type().c_str(), sdp.c_str()); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 217 | } |
| 218 | |
| 219 | void SimplePeerConnection::OnFailure(const std::string& error) { |
| 220 | LOG(LERROR) << error; |
| 221 | |
| 222 | if (OnFailureMessage) |
| 223 | OnFailureMessage(error.c_str()); |
| 224 | } |
| 225 | |
| 226 | void SimplePeerConnection::OnIceCandidate( |
| 227 | const webrtc::IceCandidateInterface* candidate) { |
| 228 | LOG(INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index(); |
| 229 | |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 230 | std::string sdp; |
| 231 | if (!candidate->ToString(&sdp)) { |
| 232 | LOG(LS_ERROR) << "Failed to serialize candidate"; |
| 233 | return; |
| 234 | } |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 235 | |
| 236 | if (OnIceCandiateReady) |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 237 | OnIceCandiateReady(sdp.c_str(), candidate->sdp_mline_index(), |
| 238 | candidate->sdp_mid().c_str()); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 239 | } |
| 240 | |
// Registers the callback that receives I420 frames from the locally
// captured video track.
void SimplePeerConnection::RegisterOnLocalI420FrameReady(
    I420FRAMEREADY_CALLBACK callback) {
  if (local_video_observer_)
    local_video_observer_->SetVideoCallback(callback);
}

// Registers the callback that receives I420 frames from the remote video
// track.
void SimplePeerConnection::RegisterOnRemoteI420FrameReady(
    I420FRAMEREADY_CALLBACK callback) {
  if (remote_video_observer_)
    remote_video_observer_->SetVideoCallback(callback);
}

// Registers the callback fired when the local data channel reaches the
// open state (see OnStateChange()).
void SimplePeerConnection::RegisterOnLocalDataChannelReady(
    LOCALDATACHANNELREADY_CALLBACK callback) {
  OnLocalDataChannelReady = callback;
}

// Registers the callback fired for each message received over the data
// channel (see OnMessage()).
void SimplePeerConnection::RegisterOnDataFromDataChannelReady(
    DATAFROMEDATECHANNELREADY_CALLBACK callback) {
  OnDataFromDataChannelReady = callback;
}

// Registers the callback fired when SDP negotiation fails (see OnFailure()).
void SimplePeerConnection::RegisterOnFailure(FAILURE_CALLBACK callback) {
  OnFailureMessage = callback;
}

// Registers the callback that receives raw remote audio data (see OnData()).
void SimplePeerConnection::RegisterOnAudioBusReady(
    AUDIOBUSREADY_CALLBACK callback) {
  OnAudioReady = callback;
}

// Registers the callback fired when a local SDP (offer/answer) is ready to
// send to the remote peer (see OnSuccess()).
void SimplePeerConnection::RegisterOnLocalSdpReadytoSend(
    LOCALSDPREADYTOSEND_CALLBACK callback) {
  OnLocalSdpReady = callback;
}

// Registers the callback fired when a local ICE candidate is ready to send
// to the remote peer (see OnIceCandidate()).
void SimplePeerConnection::RegisterOnIceCandiateReadytoSend(
    ICECANDIDATEREADYTOSEND_CALLBACK callback) {
  OnIceCandiateReady = callback;
}
| 281 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 282 | bool SimplePeerConnection::SetRemoteDescription(const char* type, |
| 283 | const char* sdp) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 284 | if (!peer_connection_) |
| 285 | return false; |
| 286 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 287 | std::string remote_desc(sdp); |
| 288 | std::string sdp_type(type); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 289 | webrtc::SdpParseError error; |
| 290 | webrtc::SessionDescriptionInterface* session_description( |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 291 | webrtc::CreateSessionDescription(sdp_type, remote_desc, &error)); |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 292 | if (!session_description) { |
| 293 | LOG(WARNING) << "Can't parse received session description message. " |
| 294 | << "SdpParseError was: " << error.description; |
| 295 | return false; |
| 296 | } |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 297 | LOG(INFO) << " Received session description :" << remote_desc; |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 298 | peer_connection_->SetRemoteDescription( |
| 299 | DummySetSessionDescriptionObserver::Create(), session_description); |
| 300 | |
| 301 | return true; |
| 302 | } |
| 303 | |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 304 | bool SimplePeerConnection::AddIceCandidate(const char* candidate, |
| 305 | const int sdp_mlineindex, |
| 306 | const char* sdp_mid) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 307 | if (!peer_connection_) |
| 308 | return false; |
| 309 | |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 310 | webrtc::SdpParseError error; |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 311 | std::unique_ptr<webrtc::IceCandidateInterface> ice_candidate( |
| 312 | webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error)); |
| 313 | if (!ice_candidate.get()) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 314 | LOG(WARNING) << "Can't parse received candidate message. " |
| 315 | << "SdpParseError was: " << error.description; |
| 316 | return false; |
| 317 | } |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 318 | if (!peer_connection_->AddIceCandidate(ice_candidate.get())) { |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 319 | LOG(WARNING) << "Failed to apply the received candidate"; |
| 320 | return false; |
| 321 | } |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 322 | LOG(INFO) << " Received candidate :" << candidate; |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 323 | return true; |
| 324 | } |
| 325 | |
| 326 | void SimplePeerConnection::SetAudioControl(bool is_mute, bool is_record) { |
| 327 | is_mute_audio_ = is_mute; |
| 328 | is_record_audio_ = is_record; |
| 329 | |
| 330 | SetAudioControl(); |
| 331 | } |
| 332 | |
| 333 | void SimplePeerConnection::SetAudioControl() { |
| 334 | if (!remote_stream_) |
| 335 | return; |
| 336 | webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks(); |
| 337 | if (tracks.empty()) |
| 338 | return; |
| 339 | |
| 340 | webrtc::AudioTrackInterface* audio_track = tracks[0]; |
| 341 | std::string id = audio_track->id(); |
| 342 | if (is_record_audio_) |
| 343 | audio_track->AddSink(this); |
| 344 | else |
| 345 | audio_track->RemoveSink(this); |
| 346 | |
| 347 | for (auto& track : tracks) { |
| 348 | if (is_mute_audio_) |
| 349 | track->set_enabled(false); |
| 350 | else |
| 351 | track->set_enabled(true); |
| 352 | } |
| 353 | } |
| 354 | |
| 355 | void SimplePeerConnection::OnAddStream( |
| 356 | rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) { |
| 357 | LOG(INFO) << __FUNCTION__ << " " << stream->label(); |
| 358 | remote_stream_ = stream; |
gyzhou | b38f386 | 2017-07-25 16:04:31 -0700 | [diff] [blame] | 359 | if (remote_video_observer_ && !remote_stream_->GetVideoTracks().empty()) { |
| 360 | remote_stream_->GetVideoTracks()[0]->AddOrUpdateSink( |
| 361 | remote_video_observer_.get(), rtc::VideoSinkWants()); |
| 362 | } |
gyzhou | ad7cad8 | 2017-05-11 16:10:03 -0700 | [diff] [blame] | 363 | SetAudioControl(); |
| 364 | } |
| 365 | |
| 366 | std::unique_ptr<cricket::VideoCapturer> |
| 367 | SimplePeerConnection::OpenVideoCaptureDevice() { |
| 368 | std::vector<std::string> device_names; |
| 369 | { |
| 370 | std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info( |
| 371 | webrtc::VideoCaptureFactory::CreateDeviceInfo()); |
| 372 | if (!info) { |
| 373 | return nullptr; |
| 374 | } |
| 375 | int num_devices = info->NumberOfDevices(); |
| 376 | for (int i = 0; i < num_devices; ++i) { |
| 377 | const uint32_t kSize = 256; |
| 378 | char name[kSize] = {0}; |
| 379 | char id[kSize] = {0}; |
| 380 | if (info->GetDeviceName(i, name, kSize, id, kSize) != -1) { |
| 381 | device_names.push_back(name); |
| 382 | } |
| 383 | } |
| 384 | } |
| 385 | |
| 386 | cricket::WebRtcVideoDeviceCapturerFactory factory; |
| 387 | std::unique_ptr<cricket::VideoCapturer> capturer; |
| 388 | for (const auto& name : device_names) { |
| 389 | capturer = factory.Create(cricket::Device(name, 0)); |
| 390 | if (capturer) { |
| 391 | break; |
| 392 | } |
| 393 | } |
| 394 | return capturer; |
| 395 | } |
| 396 | |
// Creates the local media stream (audio track always; a video track unless
// |audio_only|), wires the local video observer, and adds the stream to the
// peer connection. No-op if the stream was already added.
void SimplePeerConnection::AddStreams(bool audio_only) {
  if (active_streams_.find(kStreamLabel) != active_streams_.end())
    return;  // Already added.

  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
      g_peer_connection_factory->CreateLocalMediaStream(kStreamLabel);

  rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
      g_peer_connection_factory->CreateAudioTrack(
          kAudioLabel, g_peer_connection_factory->CreateAudioSource(nullptr)));
  // NOTE(review): |id| is never used below — looks like leftover debug code.
  std::string id = audio_track->id();
  stream->AddTrack(audio_track);

  if (!audio_only) {
#if defined(WEBRTC_ANDROID)
    // Android: the camera lives on the Java side. Fetch a
    // SurfaceTextureHelper via UnityUtility, build a native video source on
    // top of it, then link the Java camera to that source.
    JNIEnv* env = webrtc::jni::GetEnv();
    jclass pc_factory_class =
        unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
    jmethodID load_texture_helper_method = webrtc::jni::GetStaticMethodID(
        env, pc_factory_class, "LoadSurfaceTextureHelper",
        "()Lorg/webrtc/SurfaceTextureHelper;");
    jobject texture_helper = env->CallStaticObjectMethod(
        pc_factory_class, load_texture_helper_method);
    CHECK_EXCEPTION(env);
    RTC_DCHECK(texture_helper != nullptr)
        << "Cannot get the Surface Texture Helper.";

    rtc::scoped_refptr<AndroidVideoTrackSource> source(
        new rtc::RefCountedObject<AndroidVideoTrackSource>(
            g_signaling_thread.get(), env, texture_helper, false));
    rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
        webrtc::VideoTrackSourceProxy::Create(g_signaling_thread.get(),
                                              g_worker_thread.get(), source);

    // link with VideoCapturer (Camera);
    jmethodID link_camera_method = webrtc::jni::GetStaticMethodID(
        env, pc_factory_class, "LinkCamera",
        "(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;");
    jobject camera_tmp =
        env->CallStaticObjectMethod(pc_factory_class, link_camera_method,
                                    (jlong)proxy_source.get(), texture_helper);
    CHECK_EXCEPTION(env);
    // Keep a global ref so DeletePeerConnection() can stop the camera later.
    g_camera = (jobject)env->NewGlobalRef(camera_tmp);

    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        g_peer_connection_factory->CreateVideoTrack(kVideoLabel,
                                                    proxy_source.release()));
    stream->AddTrack(video_track);
#else
    // Desktop: open the first usable capture device and build a video track
    // on top of it; silently skip video if no device can be opened.
    std::unique_ptr<cricket::VideoCapturer> capture = OpenVideoCaptureDevice();
    if (capture) {
      rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          g_peer_connection_factory->CreateVideoTrack(
              kVideoLabel, g_peer_connection_factory->CreateVideoSource(
                               std::move(capture), nullptr)));

      stream->AddTrack(video_track);
    }
#endif
    // Mirror local video frames to the registered observer, if any.
    if (local_video_observer_ && !stream->GetVideoTracks().empty()) {
      stream->GetVideoTracks()[0]->AddOrUpdateSink(local_video_observer_.get(),
                                                   rtc::VideoSinkWants());
    }
  }

  if (!peer_connection_->AddStream(stream)) {
    LOG(LS_ERROR) << "Adding stream to PeerConnection failed";
  }

  typedef std::pair<std::string,
                    rtc::scoped_refptr<webrtc::MediaStreamInterface>>
      MediaStreamPair;
  active_streams_.insert(MediaStreamPair(stream->label(), stream));
}
| 471 | |
| 472 | bool SimplePeerConnection::CreateDataChannel() { |
| 473 | struct webrtc::DataChannelInit init; |
| 474 | init.ordered = true; |
| 475 | init.reliable = true; |
| 476 | data_channel_ = peer_connection_->CreateDataChannel("Hello", &init); |
| 477 | if (data_channel_.get()) { |
| 478 | data_channel_->RegisterObserver(this); |
| 479 | LOG(LS_INFO) << "Succeeds to create data channel"; |
| 480 | return true; |
| 481 | } else { |
| 482 | LOG(LS_INFO) << "Fails to create data channel"; |
| 483 | return false; |
| 484 | } |
| 485 | } |
| 486 | |
| 487 | void SimplePeerConnection::CloseDataChannel() { |
| 488 | if (data_channel_.get()) { |
| 489 | data_channel_->UnregisterObserver(); |
| 490 | data_channel_->Close(); |
| 491 | } |
| 492 | data_channel_ = nullptr; |
| 493 | } |
| 494 | |
| 495 | bool SimplePeerConnection::SendDataViaDataChannel(const std::string& data) { |
| 496 | if (!data_channel_.get()) { |
| 497 | LOG(LS_INFO) << "Data channel is not established"; |
| 498 | return false; |
| 499 | } |
| 500 | webrtc::DataBuffer buffer(data); |
| 501 | data_channel_->Send(buffer); |
| 502 | return true; |
| 503 | } |
| 504 | |
| 505 | // Peerconnection observer |
| 506 | void SimplePeerConnection::OnDataChannel( |
| 507 | rtc::scoped_refptr<webrtc::DataChannelInterface> channel) { |
| 508 | channel->RegisterObserver(this); |
| 509 | } |
| 510 | |
| 511 | void SimplePeerConnection::OnStateChange() { |
| 512 | if (data_channel_) { |
| 513 | webrtc::DataChannelInterface::DataState state = data_channel_->state(); |
| 514 | if (state == webrtc::DataChannelInterface::kOpen) { |
| 515 | if (OnLocalDataChannelReady) |
| 516 | OnLocalDataChannelReady(); |
| 517 | LOG(LS_INFO) << "Data channel is open"; |
| 518 | } |
| 519 | } |
| 520 | } |
| 521 | |
| 522 | // A data buffer was successfully received. |
| 523 | void SimplePeerConnection::OnMessage(const webrtc::DataBuffer& buffer) { |
| 524 | size_t size = buffer.data.size(); |
| 525 | char* msg = new char[size + 1]; |
| 526 | memcpy(msg, buffer.data.data(), size); |
| 527 | msg[size] = 0; |
| 528 | if (OnDataFromDataChannelReady) |
| 529 | OnDataFromDataChannelReady(msg); |
| 530 | delete[] msg; |
| 531 | } |
| 532 | |
| 533 | // AudioTrackSinkInterface implementation. |
| 534 | void SimplePeerConnection::OnData(const void* audio_data, |
| 535 | int bits_per_sample, |
| 536 | int sample_rate, |
| 537 | size_t number_of_channels, |
| 538 | size_t number_of_frames) { |
| 539 | if (OnAudioReady) |
| 540 | OnAudioReady(audio_data, bits_per_sample, sample_rate, |
| 541 | static_cast<int>(number_of_channels), |
| 542 | static_cast<int>(number_of_frames)); |
| 543 | } |
| 544 | |
| 545 | std::vector<uint32_t> SimplePeerConnection::GetRemoteAudioTrackSsrcs() { |
| 546 | std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers = |
| 547 | peer_connection_->GetReceivers(); |
| 548 | |
| 549 | std::vector<uint32_t> ssrcs; |
| 550 | for (const auto& receiver : receivers) { |
| 551 | if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO) |
| 552 | continue; |
| 553 | |
| 554 | std::vector<webrtc::RtpEncodingParameters> params = |
| 555 | receiver->GetParameters().encodings; |
| 556 | |
| 557 | for (const auto& param : params) { |
| 558 | uint32_t ssrc = param.ssrc.value_or(0); |
| 559 | if (ssrc > 0) |
| 560 | ssrcs.push_back(ssrc); |
| 561 | } |
| 562 | } |
| 563 | |
| 564 | return ssrcs; |
| 565 | } |