Replace MapWrapper with std::map<>.
MapWrapper was needed on some platforms where the STL wasn't supported; we
now use std::map<> directly.
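For reviewers, a minimal sketch of the translation pattern applied throughout
this change. "Participant" is a hypothetical stand-in for the mapped value
types (MixerParticipant, VideoCaptureCapability, IncomingVideoStream, ...);
the MapWrapper calls shown in comments are the ones being removed below.

  #include <map>

  struct Participant {};

  void Example() {
    Participant* participant = new Participant;

    // Before: untyped MapWrapper storing void* items.
    //   MapWrapper map;
    //   map.Insert(1, static_cast<void*>(participant));
    //   MapItem* item = map.Find(1);
    //   if (item != NULL)
    //     Participant* p = static_cast<Participant*>(item->GetItem());
    //   map.Erase(1);

    // After: typed std::map, no casts needed.
    std::map<int, Participant*> map;
    map[1] = participant;                               // Insert()
    std::map<int, Participant*>::iterator it = map.find(1);
    if (it != map.end()) {
      Participant* p = it->second;                      // GetItem(), no cast
      (void)p;
    }
    map.erase(1);                                       // Erase()

    delete participant;                                 // map never owned it
  }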
BUG=2164
TEST=trybots
R=henrike@webrtc.org, phoglund@webrtc.org, stefan@webrtc.org, wu@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/2001004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@4530 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h b/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
index ba04b5a..663be18 100644
--- a/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
+++ b/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
@@ -12,7 +12,6 @@
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -87,19 +86,6 @@
AudioMixerOutputReceiver() {}
virtual ~AudioMixerOutputReceiver() {}
};
-
-class AudioRelayReceiver
-{
-public:
- // This callback function provides the mix decision for this mix iteration.
- // mixerList is a list of elements of the type
- // [int,MixerParticipant*]
- virtual void NewAudioToRelay(const int32_t id,
- const MapWrapper& mixerList) = 0;
-protected:
- AudioRelayReceiver() {}
- virtual ~AudioRelayReceiver() {}
-};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
diff --git a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index ab9d803..da16814 100644
--- a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -14,7 +14,6 @@
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/interface/audio_frame_operations.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -240,7 +239,7 @@
ListWrapper mixList;
ListWrapper rampOutList;
ListWrapper additionalFramesList;
- MapWrapper mixedParticipantsMap;
+ std::map<int, MixerParticipant*> mixedParticipantsMap;
{
CriticalSectionScoped cs(_cbCrit.get());
@@ -297,19 +296,15 @@
}
}
- UpdateToMix(mixList, rampOutList, mixedParticipantsMap,
+ UpdateToMix(mixList, rampOutList, &mixedParticipantsMap,
remainingParticipantsAllowedToMix);
GetAdditionalAudio(additionalFramesList);
UpdateMixedStatus(mixedParticipantsMap);
- _scratchParticipantsToMixAmount = mixedParticipantsMap.Size();
+ _scratchParticipantsToMixAmount =
+ static_cast<uint32_t>(mixedParticipantsMap.size());
}
- // Clear mixedParticipantsMap to avoid memory leak warning.
- // Please note that the mixedParticipantsMap doesn't own any dynamically
- // allocated memory.
- while(mixedParticipantsMap.Erase(mixedParticipantsMap.First()) == 0) {}
-
// Get an AudioFrame for mixing from the memory pool.
AudioFrame* mixedAudio = NULL;
if(_audioFramePool->PopMemory(mixedAudio) == -1)
@@ -718,9 +713,8 @@
void AudioConferenceMixerImpl::UpdateToMix(
ListWrapper& mixList,
ListWrapper& rampOutList,
- MapWrapper& mixParticipantList,
- uint32_t& maxAudioFrameCounter)
-{
+ std::map<int, MixerParticipant*>* mixParticipantList,
+ uint32_t& maxAudioFrameCounter) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
maxAudioFrameCounter);
@@ -811,39 +805,30 @@
replaceItem->GetItem());
bool replaceWasMixed = false;
- MapItem* replaceParticipant = mixParticipantList.Find(
- replaceFrame->id_);
+ std::map<int, MixerParticipant*>::iterator it =
+ mixParticipantList->find(replaceFrame->id_);
+
// When a frame is pushed to |activeList| it is also pushed
// to mixParticipantList with the frame's id. This means
// that the Find call above should never fail.
- if(replaceParticipant == NULL)
- {
- assert(false);
+ assert(it != mixParticipantList->end());
+ it->second->_mixHistory->WasMixed(replaceWasMixed);
+
+ mixParticipantList->erase(replaceFrame->id_);
+ activeList.Erase(replaceItem);
+
+ activeList.PushFront(static_cast<void*>(audioFrame));
+ (*mixParticipantList)[audioFrame->id_] = participant;
+ assert(mixParticipantList->size() <=
+ kMaximumAmountOfMixedParticipants);
+
+ if (replaceWasMixed) {
+ RampOut(*replaceFrame);
+ rampOutList.PushBack(static_cast<void*>(replaceFrame));
+ assert(rampOutList.GetSize() <=
+ kMaximumAmountOfMixedParticipants);
} else {
- static_cast<MixerParticipant*>(
- replaceParticipant->GetItem())->_mixHistory->
- WasMixed(replaceWasMixed);
-
- mixParticipantList.Erase(replaceFrame->id_);
- activeList.Erase(replaceItem);
-
- activeList.PushFront(static_cast<void*>(audioFrame));
- mixParticipantList.Insert(
- audioFrame->id_,
- static_cast<void*>(participant));
- assert(mixParticipantList.Size() <=
- kMaximumAmountOfMixedParticipants);
-
- if(replaceWasMixed)
- {
- RampOut(*replaceFrame);
- rampOutList.PushBack(
- static_cast<void*>(replaceFrame));
- assert(rampOutList.GetSize() <=
- kMaximumAmountOfMixedParticipants);
- } else {
- _audioFramePool->PushMemory(replaceFrame);
- }
+ _audioFramePool->PushMemory(replaceFrame);
}
} else {
if(wasMixed)
@@ -858,9 +843,8 @@
}
} else {
activeList.PushFront(static_cast<void*>(audioFrame));
- mixParticipantList.Insert(audioFrame->id_,
- static_cast<void*>(participant));
- assert(mixParticipantList.Size() <=
+ (*mixParticipantList)[audioFrame->id_] = participant;
+ assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
} else {
@@ -902,9 +886,9 @@
if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
{
mixList.PushBack(pair->audioFrame);
- mixParticipantList.Insert(pair->audioFrame->id_,
- static_cast<void*>(pair->participant));
- assert(mixParticipantList.Size() <=
+ (*mixParticipantList)[pair->audioFrame->id_] =
+ pair->participant;
+ assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
else
@@ -923,9 +907,8 @@
if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
{
mixList.PushBack(pair->audioFrame);
- mixParticipantList.Insert(pair->audioFrame->id_,
- static_cast<void*>(pair->participant));
- assert(mixParticipantList.Size() <=
+ (*mixParticipantList)[pair->audioFrame->id_] = pair->participant;
+ assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
else
@@ -983,11 +966,11 @@
}
void AudioConferenceMixerImpl::UpdateMixedStatus(
- MapWrapper& mixedParticipantsMap)
+ std::map<int, MixerParticipant*>& mixedParticipantsMap)
{
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateMixedStatus(mixedParticipantsMap)");
- assert(mixedParticipantsMap.Size() <= kMaximumAmountOfMixedParticipants);
+ assert(mixedParticipantsMap.size() <= kMaximumAmountOfMixedParticipants);
// Loop through all participants. If they are in the mix map they
// were mixed.
@@ -998,15 +981,14 @@
MixerParticipant* participant =
static_cast<MixerParticipant*>(participantItem->GetItem());
- MapItem* mixedItem = mixedParticipantsMap.First();
- while(mixedItem)
- {
- if(participant == mixedItem->GetItem())
- {
- isMixed = true;
- break;
- }
- mixedItem = mixedParticipantsMap.Next(mixedItem);
+ for (std::map<int, MixerParticipant*>::iterator it =
+ mixedParticipantsMap.begin();
+ it != mixedParticipantsMap.end();
+ ++it) {
+ if (it->second == participant) {
+ isMixed = true;
+ break;
+ }
}
participant->_mixHistory->SetIsMixed(isMixed);
participantItem = _participantList.Next(participantItem);
diff --git a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 261cd88..737acbb 100644
--- a/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -11,6 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+#include <map>
+
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h"
#include "webrtc/modules/audio_conference_mixer/source/level_indicator.h"
@@ -99,9 +101,11 @@
// rampOutList contain AudioFrames corresponding to an audio stream that
// used to be mixed but shouldn't be mixed any longer. These AudioFrames
// should be ramped out over this AudioFrame to avoid audio discontinuities.
- void UpdateToMix(ListWrapper& mixList, ListWrapper& rampOutList,
- MapWrapper& mixParticipantList,
- uint32_t& maxAudioFrameCounter);
+ void UpdateToMix(
+ ListWrapper& mixList,
+ ListWrapper& rampOutList,
+ std::map<int, MixerParticipant*>* mixParticipantList,
+ uint32_t& maxAudioFrameCounter);
// Return the lowest mixing frequency that can be used without having to
// downsample any audio.
@@ -113,7 +117,8 @@
// Update the MixHistory of all MixerParticipants. mixedParticipantsList
// should contain a map of MixerParticipants that have been mixed.
- void UpdateMixedStatus(MapWrapper& mixedParticipantsList);
+ void UpdateMixedStatus(
+ std::map<int, MixerParticipant*>& mixedParticipantsList);
// Clears audioFrameList and reclaims all memory associated with it.
void ClearAudioFrameList(ListWrapper& audioFrameList);
diff --git a/webrtc/modules/video_capture/android/device_info_android.cc b/webrtc/modules/video_capture/android/device_info_android.cc
index b221fd9..3c153af 100644
--- a/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/webrtc/modules/video_capture/android/device_info_android.cc
@@ -161,11 +161,12 @@
int32_t DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) {
- MapItem* item = NULL;
- while ((item = _captureCapabilities.Last())) {
- delete (VideoCaptureCapability*) item->GetItem();
- _captureCapabilities.Erase(item);
- }
+ for (std::map<int, VideoCaptureCapability*>::iterator it =
+ _captureCapabilities.begin();
+ it != _captureCapabilities.end();
+ ++it)
+ delete it->second;
+ _captureCapabilities.clear();
JNIEnv *env;
jclass javaCmDevInfoClass;
@@ -247,7 +248,7 @@
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"%s: Cap width %d, height %d, fps %d", __FUNCTION__,
cap->width, cap->height, cap->maxFPS);
- _captureCapabilities.Insert(i, cap);
+ _captureCapabilities[i] = cap;
}
_lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
@@ -259,9 +260,9 @@
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
- "CreateCapabilityMap %d", _captureCapabilities.Size());
+ "CreateCapabilityMap %d", _captureCapabilities.size());
- return _captureCapabilities.Size();
+ return _captureCapabilities.size();
}
int32_t DeviceInfoAndroid::GetOrientation(
diff --git a/webrtc/modules/video_capture/device_info_impl.cc b/webrtc/modules/video_capture/device_info_impl.cc
index c360cf3..1bdaa14 100644
--- a/webrtc/modules/video_capture/device_info_impl.cc
+++ b/webrtc/modules/video_capture/device_info_impl.cc
@@ -31,13 +31,13 @@
DeviceInfoImpl::~DeviceInfoImpl(void)
{
_apiLock.AcquireLockExclusive();
- // Reset old capability list
- MapItem* item = NULL;
- while ((item = _captureCapabilities.Last()))
- {
- delete (VideoCaptureCapability*) item->GetItem();
- _captureCapabilities.Erase(item);
+
+ for (VideoCaptureCapabilityMap::iterator it = _captureCapabilities.begin();
+ it != _captureCapabilities.end();
+ ++it) {
+ delete it->second;
}
+
free(_lastUsedDeviceName);
_apiLock.ReleaseLockExclusive();
@@ -67,7 +67,7 @@
{
//yes
_apiLock.ReleaseLockShared();
- return _captureCapabilities.Size();
+ return static_cast<int32_t>(_captureCapabilities.size());
}
}
// Need to get exclusive rights to create the new capability map.
@@ -116,7 +116,7 @@
}
// Make sure the number is valid
- if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.Size())
+ if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.size())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"deviceCapabilityNumber %d is invalid in call to GetCapability",
@@ -124,23 +124,23 @@
return -1;
}
- MapItem* item = _captureCapabilities.Find(deviceCapabilityNumber);
- if (!item)
+ VideoCaptureCapabilityMap::iterator item =
+ _captureCapabilities.find(deviceCapabilityNumber);
+
+ if (item == _captureCapabilities.end())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to find capability number %d of %d possible",
- deviceCapabilityNumber, _captureCapabilities.Size());
+ deviceCapabilityNumber, _captureCapabilities.size());
return -1;
}
- VideoCaptureCapability* capPointer = static_cast<VideoCaptureCapability*>
- (item->GetItem());
- if (!capPointer)
+ if (item->second == NULL)
{
return -1;
}
- capability = *capPointer;
+ capability = *item->second;
return 0;
}
@@ -183,16 +183,16 @@
RawVideoType bestRawType = kVideoUnknown;
webrtc::VideoCodecType bestCodecType = webrtc::kVideoCodecUnknown;
- const int32_t numberOfCapabilies = _captureCapabilities.Size();
+ const int32_t numberOfCapabilies =
+ static_cast<int32_t>(_captureCapabilities.size());
for (int32_t tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities
{
- MapItem* item = _captureCapabilities.Find(tmp);
- if (!item)
+ VideoCaptureCapabilityMap::iterator item = _captureCapabilities.find(tmp);
+ if (item == _captureCapabilities.end())
return -1;
- VideoCaptureCapability& capability = *static_cast<VideoCaptureCapability*>
- (item->GetItem());
+ VideoCaptureCapability& capability = *item->second;
const int32_t diffWidth = capability.width - requested.width;
const int32_t diffHeight = capability.height - requested.height;
@@ -298,15 +298,14 @@
bestWidth, bestHeight, bestFrameRate, bestRawType);
// Copy the capability
- MapItem* item = _captureCapabilities.Find(bestformatIndex);
- if (!item)
+ VideoCaptureCapabilityMap::iterator item =
+ _captureCapabilities.find(bestformatIndex);
+ if (item == _captureCapabilities.end())
return -1;
- VideoCaptureCapability* capPointer =
- static_cast<VideoCaptureCapability*> (item->GetItem());
- if (!capPointer)
+ if (item->second == NULL)
return -1;
- resulting = *capPointer;
+ resulting = *item->second;
return bestformatIndex;
}
diff --git a/webrtc/modules/video_capture/device_info_impl.h b/webrtc/modules/video_capture/device_info_impl.h
index 79cf527..dc4c08c 100644
--- a/webrtc/modules/video_capture/device_info_impl.h
+++ b/webrtc/modules/video_capture/device_info_impl.h
@@ -11,9 +11,10 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+#include <map>
+
#include "webrtc/modules/video_capture/include/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_delay.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
namespace webrtc
@@ -57,7 +58,8 @@
protected:
// Data members
int32_t _id;
- MapWrapper _captureCapabilities;
+ typedef std::map<int, VideoCaptureCapability*> VideoCaptureCapabilityMap;
+ VideoCaptureCapabilityMap _captureCapabilities;
RWLockWrapper& _apiLock;
char* _lastUsedDeviceName;
uint32_t _lastUsedDeviceNameLength;
diff --git a/webrtc/modules/video_capture/linux/device_info_linux.cc b/webrtc/modules/video_capture/linux/device_info_linux.cc
index 511778d..0b16ca3 100644
--- a/webrtc/modules/video_capture/linux/device_info_linux.cc
+++ b/webrtc/modules/video_capture/linux/device_info_linux.cc
@@ -219,12 +219,13 @@
// now fd will point to the matching device
// reset old capability map
- MapItem* item = NULL;
- while ((item = _captureCapabilities.Last()))
- {
- delete static_cast<VideoCaptureCapability*> (item->GetItem());
- _captureCapabilities.Erase(item);
+ for (std::map<int, VideoCaptureCapability*>::iterator it =
+ _captureCapabilities.begin();
+ it != _captureCapabilities.end();
+ ++it) {
+ delete it->second;
}
+ _captureCapabilities.clear();
int size = FillCapabilityMap(fd);
close(fd);
@@ -235,8 +236,11 @@
_lastUsedDeviceNameLength + 1);
memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1);
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
- _captureCapabilities.Size());
+ WEBRTC_TRACE(webrtc::kTraceInfo,
+ webrtc::kTraceVideoCapture,
+ _id,
+ "CreateCapabilityMap %u",
+ static_cast<unsigned int>(_captureCapabilities.size()));
return size;
}
@@ -314,7 +318,7 @@
cap->maxFPS = 30;
}
- _captureCapabilities.Insert(index, cap);
+ _captureCapabilities[index] = cap;
index++;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"Camera capability, width:%d height:%d type:%d fps:%d",
@@ -324,9 +328,12 @@
}
}
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
- _captureCapabilities.Size());
- return _captureCapabilities.Size();
+ WEBRTC_TRACE(webrtc::kTraceInfo,
+ webrtc::kTraceVideoCapture,
+ _id,
+ "CreateCapabilityMap %u",
+ static_cast<unsigned int>(_captureCapabilities.size()));
+ return _captureCapabilities.size();
}
} // namespace videocapturemodule
diff --git a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h
index cae923b..fd994ad 100644
--- a/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h
+++ b/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.h
@@ -14,7 +14,6 @@
#include "webrtc/modules/video_capture/device_info_impl.h"
#include "webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_utility.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
@class VideoCaptureMacQTKitInfoObjC;
diff --git a/webrtc/modules/video_capture/windows/device_info_ds.cc b/webrtc/modules/video_capture/windows/device_info_ds.cc
index f459802..11e1f41 100644
--- a/webrtc/modules/video_capture/windows/device_info_ds.cc
+++ b/webrtc/modules/video_capture/windows/device_info_ds.cc
@@ -367,23 +367,18 @@
}
int32_t DeviceInfoDS::GetWindowsCapability(
- const int32_t capabilityIndex,
- VideoCaptureCapabilityWindows& windowsCapability)
+ const int32_t capabilityIndex,
+ VideoCaptureCapabilityWindows& windowsCapability) {
+ ReadLockScoped cs(_apiLock);
-{
- ReadLockScoped cs(_apiLock);
- // Make sure the number is valid
- if (capabilityIndex >= _captureCapabilities.Size() || capabilityIndex < 0)
- return -1;
+ std::map<int, VideoCaptureCapability*>::iterator item =
+ _captureCapabilities.find(capabilityIndex);
+ if (item == _captureCapabilities.end())
+ return -1;
- MapItem* item = _captureCapabilities.Find(capabilityIndex);
- if (!item)
- return -1;
-
- VideoCaptureCapabilityWindows* capPointer =
- static_cast<VideoCaptureCapabilityWindows*> (item->GetItem());
- windowsCapability = *capPointer;
- return 0;
+ windowsCapability =
+ *static_cast<VideoCaptureCapabilityWindows*>(item->second);
+ return 0;
}
int32_t DeviceInfoDS::CreateCapabilityMap(
@@ -391,15 +386,15 @@
{
// Reset old capability list
- MapItem* item = NULL;
- while (item = _captureCapabilities.Last())
- {
- VideoCaptureCapabilityWindows* cap =
- static_cast<VideoCaptureCapabilityWindows*> (item->GetItem());
- delete cap;
- _captureCapabilities.Erase(item);
+ for (std::map<int, VideoCaptureCapability*>::iterator it =
+ _captureCapabilities.begin();
+ it != _captureCapabilities.end();
+ ++it) {
+ delete it->second;
}
+ _captureCapabilities.clear();
+
const int32_t deviceUniqueIdUTF8Length =
(int32_t) strlen((char*) deviceUniqueIdUTF8);
if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
@@ -678,7 +673,7 @@
productId,
capability->width,
capability->height);
- _captureCapabilities.Insert(index++, capability);
+ _captureCapabilities[index++] = capability;
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"Camera capability, width:%d height:%d type:%d fps:%d",
capability->width, capability->height,
@@ -699,9 +694,9 @@
+ 1);
memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength+ 1);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
- "CreateCapabilityMap %d", _captureCapabilities.Size());
+ "CreateCapabilityMap %d", _captureCapabilities.size());
- return _captureCapabilities.Size();
+ return static_cast<int32_t>(_captureCapabilities.size());
}
/* Constructs a product ID from the Windows DevicePath. on a USB device the devicePath contains product id and vendor id.
diff --git a/webrtc/modules/video_capture/windows/device_info_ds.h b/webrtc/modules/video_capture/windows/device_info_ds.h
index 19961af..2230172 100644
--- a/webrtc/modules/video_capture/windows/device_info_ds.h
+++ b/webrtc/modules/video_capture/windows/device_info_ds.h
@@ -13,7 +13,6 @@
#include "webrtc/modules/video_capture/device_info_impl.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
#include <Dshow.h>
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.cc b/webrtc/modules/video_render/android/video_render_android_impl.cc
index 7bd0919..27a264a 100644
--- a/webrtc/modules/video_render/android/video_render_android_impl.cc
+++ b/webrtc/modules/video_render/android/video_render_android_impl.cc
@@ -46,7 +46,6 @@
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderType(videoRenderType),
_ptrWindow((jobject)(window)),
- _streamsMap(),
_javaShutDownFlag(false),
_javaShutdownEvent(*EventWrapper::Create()),
_javaRenderEvent(*EventWrapper::Create()),
@@ -62,9 +61,10 @@
if (_javaRenderThread)
StopRender();
- for (MapItem* item = _streamsMap.First(); item != NULL; item
- = _streamsMap.Next(item)) { // Delete streams
- delete static_cast<AndroidStream*> (item->GetItem());
+ for (AndroidStreamMap::iterator it = _streamsMap.begin();
+ it != _streamsMap.end();
+ ++it) {
+ delete it->second;
}
delete &_javaShutdownEvent;
delete &_javaRenderEvent;
@@ -91,20 +91,20 @@
CriticalSectionScoped cs(&_critSect);
AndroidStream* renderStream = NULL;
- MapItem* item = _streamsMap.Find(streamId);
- if (item) {
- renderStream = (AndroidStream*) (item->GetItem());
- if (NULL != renderStream) {
- WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
- "%s: Render stream already exists", __FUNCTION__);
- return renderStream;
- }
+ AndroidStreamMap::iterator item = _streamsMap.find(streamId);
+ if (item != _streamsMap.end() && item->second != NULL) {
+ WEBRTC_TRACE(kTraceInfo,
+ kTraceVideoRenderer,
+ -1,
+ "%s: Render stream already exists",
+ __FUNCTION__);
+ return item->second;
}
renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
right, bottom, *this);
if (renderStream) {
- _streamsMap.Insert(streamId, renderStream);
+ _streamsMap[streamId] = renderStream;
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -118,16 +118,14 @@
const uint32_t streamId) {
CriticalSectionScoped cs(&_critSect);
- MapItem* item = _streamsMap.Find(streamId);
- if (item) {
- delete (AndroidStream*) item->GetItem();
- _streamsMap.Erase(streamId);
- }
- else {
+ AndroidStreamMap::iterator item = _streamsMap.find(streamId);
+ if (item == _streamsMap.end()) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return -1;
}
+ delete item->second;
+ _streamsMap.erase(item);
return 0;
}
@@ -234,10 +232,10 @@
}
}
- for (MapItem* item = _streamsMap.First(); item != NULL;
- item = _streamsMap.Next(item)) {
- static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
- _javaRenderJniEnv);
+ for (AndroidStreamMap::iterator it = _streamsMap.begin();
+ it != _streamsMap.end();
+ ++it) {
+ it->second->DeliverFrame(_javaRenderJniEnv);
}
if (_javaShutDownFlag) {
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.h b/webrtc/modules/video_render/android/video_render_android_impl.h
index eac07d2..1541148 100644
--- a/webrtc/modules/video_render/android/video_render_android_impl.h
+++ b/webrtc/modules/video_render/android/video_render_android_impl.h
@@ -12,8 +12,10 @@
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#include <jni.h>
+
+#include <map>
+
#include "webrtc/modules/video_render/i_video_render.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
namespace webrtc {
@@ -136,7 +138,8 @@
bool JavaRenderThreadProcess();
// Map with streams to render.
- MapWrapper _streamsMap;
+ typedef std::map<int32_t, AndroidStream*> AndroidStreamMap;
+ AndroidStreamMap _streamsMap;
// True if the _javaRenderThread thread shall be detached from the JVM.
bool _javaShutDownFlag;
EventWrapper& _javaShutdownEvent;
diff --git a/webrtc/modules/video_render/incoming_video_stream.cc b/webrtc/modules/video_render/incoming_video_stream.cc
index 39556d8..71f30c3 100644
--- a/webrtc/modules/video_render/incoming_video_stream.cc
+++ b/webrtc/modules/video_render/incoming_video_stream.cc
@@ -25,7 +25,6 @@
#include "webrtc/modules/video_render//video_render_frames.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace.h"
diff --git a/webrtc/modules/video_render/incoming_video_stream.h b/webrtc/modules/video_render/incoming_video_stream.h
index 500ce26..87fe2de 100644
--- a/webrtc/modules/video_render/incoming_video_stream.h
+++ b/webrtc/modules/video_render/incoming_video_stream.h
@@ -12,7 +12,6 @@
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc
index 7dc0dbf..64467c1 100644
--- a/webrtc/modules/video_render/video_render_impl.cc
+++ b/webrtc/modules/video_render/video_render_impl.cc
@@ -96,8 +96,7 @@
void* window,
const bool fullscreen) :
_id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
- _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL),
- _streamRenderMap(*(new MapWrapper()))
+ _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
{
// Create platform specific renderer
@@ -222,16 +221,11 @@
{
delete &_moduleCrit;
- while (_streamRenderMap.Size() > 0)
- {
- MapItem* item = _streamRenderMap.First();
- IncomingVideoStream* ptrIncomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- assert(ptrIncomingStream != NULL);
- delete ptrIncomingStream;
- _streamRenderMap.Erase(item);
+ for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+ it != _streamRenderMap.end();
+ ++it) {
+ delete it->second;
}
- delete &_streamRenderMap;
// Delete platform specific renderer
if (_ptrRenderer)
@@ -410,29 +404,22 @@
return _id;
}
-uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(
- const uint32_t streamId)
-{
- CriticalSectionScoped cs(&_moduleCrit);
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+ CriticalSectionScoped cs(&_moduleCrit);
- MapItem* mapItem = _streamRenderMap.Find(streamId);
- if (mapItem == NULL)
- {
- // This stream doesn't exist
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
- "%s: stream doesn't exist", __FUNCTION__);
- return 0;
- }
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (mapItem->GetItem());
- if (incomingStream == NULL)
- {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(mapItem);
- return 0;
- }
- return incomingStream->IncomingRate();
+ IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+ if (it == _streamRenderMap.end()) {
+ // This stream doesn't exist
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideoRenderer,
+ _id,
+ "%s: stream doesn't exist",
+ __FUNCTION__);
+ return 0;
+ }
+ assert(it->second != NULL);
+ return it->second->IncomingRate();
}
VideoRenderCallback*
@@ -452,8 +439,7 @@
return NULL;
}
- if (_streamRenderMap.Find(streamId) != NULL)
- {
+ if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
// The stream already exists...
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream already exists", __FUNCTION__);
@@ -495,7 +481,7 @@
ptrIncomingStream->ModuleCallback();
// Store the stream
- _streamRenderMap.Insert(streamId, ptrIncomingStream);
+ _streamRenderMap[streamId] = ptrIncomingStream;
return moduleCallback;
}
@@ -512,56 +498,52 @@
return -1;
}
- MapItem* mapItem = _streamRenderMap.Find(streamId);
- if (!mapItem)
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
- IncomingVideoStream* ptrIncomingStream =
- static_cast<IncomingVideoStream*> (mapItem->GetItem());
- delete ptrIncomingStream;
- ptrIncomingStream = NULL;
+ delete item->second;
+
_ptrRenderer->DeleteIncomingRenderStream(streamId);
- _streamRenderMap.Erase(mapItem);
+
+ _streamRenderMap.erase(item);
return 0;
}
int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
- const uint32_t streamId,
- VideoRenderCallback* renderObject)
-{
+ const uint32_t streamId,
+ VideoRenderCallback* renderObject) {
CriticalSectionScoped cs(&_moduleCrit);
- MapItem* mapItem = _streamRenderMap.Find(streamId);
- if (!mapItem)
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
- IncomingVideoStream* ptrIncomingStream =
- static_cast<IncomingVideoStream*> (mapItem->GetItem());
- if (!ptrIncomingStream) {
+ if (item->second == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get stream", __FUNCTION__);
return -1;
}
- return ptrIncomingStream->SetExternalCallback(renderObject);
+ return item->second->SetExternalCallback(renderObject);
}
int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
- const uint32_t streamId,
- uint32_t& zOrder,
- float& left,
- float& top,
- float& right,
- float& bottom) const
-{
+ const uint32_t streamId,
+ uint32_t& zOrder,
+ float& left,
+ float& top,
+ float& right,
+ float& bottom) const {
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
@@ -580,27 +562,20 @@
{
CriticalSectionScoped cs(&_moduleCrit);
- return (uint32_t) _streamRenderMap.Size();
+ return static_cast<uint32_t>(_streamRenderMap.size());
}
bool ModuleVideoRenderImpl::HasIncomingRenderStream(
- const uint32_t streamId) const
-{
- CriticalSectionScoped cs(&_moduleCrit);
+ const uint32_t streamId) const {
+ CriticalSectionScoped cs(&_moduleCrit);
- bool hasStream = false;
- if (_streamRenderMap.Find(streamId) != NULL)
- {
- hasStream = true;
- }
- return hasStream;
+ return _streamRenderMap.find(streamId) != _streamRenderMap.end();
}
int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
- const uint32_t streamId,
- VideoRenderCallback* callbackObj)
-{
- return -1;
+ const uint32_t streamId,
+ VideoRenderCallback* callbackObj) {
+ return -1;
}
int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
@@ -615,15 +590,14 @@
}
// Start the stream
- MapItem* item = _streamRenderMap.Find(streamId);
- if (item == NULL)
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
{
return -1;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream->Start() == -1)
+ if (item->second->Start() == -1)
{
return -1;
}
@@ -648,15 +622,14 @@
}
// Stop the incoming stream
- MapItem* item = _streamRenderMap.Find(streamId);
- if (item == NULL)
+ IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+ if (item == _streamRenderMap.end())
{
return -1;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream->Stop() == -1)
+ if (item->second->Stop() == -1)
{
return -1;
}
@@ -668,21 +641,15 @@
{
CriticalSectionScoped cs(&_moduleCrit);
- int32_t error = 0;
-
- // Loop through all incoming streams and stop them
- MapItem* item = _streamRenderMap.First();
- while (item)
- {
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream->Reset() == -1)
- {
- error = -1;
- }
- item = _streamRenderMap.Next(item);
+ int32_t ret = 0;
+ // Loop through all incoming streams and reset them
+ for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+ it != _streamRenderMap.end();
+ ++it) {
+ if (it->second->Reset() == -1)
+ ret = -1;
}
- return error;
+ return ret;
}
RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
@@ -830,24 +797,18 @@
return -1;
}
- MapItem *item = _streamRenderMap.Find(streamId);
- if (item == NULL)
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
{
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return 0;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream == NULL)
- {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(item);
- return 0;
- }
- return incomingStream->GetLastRenderedFrame(frame);
+
+ assert(item->second != NULL);
+ return item->second->GetLastRenderedFrame(frame);
}
int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
@@ -860,8 +821,9 @@
return false;
}
- MapItem *item = _streamRenderMap.Find(stream_id);
- if (item == NULL) {
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(stream_id);
+ if (item == _streamRenderMap.end()) {
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
@@ -869,16 +831,8 @@
return -1;
}
- IncomingVideoStream* incoming_stream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incoming_stream == NULL) {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(item);
- return 0;
- }
-
- return incoming_stream->SetExpectedRenderDelay(delay_ms);
+ assert(item->second != NULL);
+ return item->second->SetExpectedRenderDelay(delay_ms);
}
int32_t ModuleVideoRenderImpl::ConfigureRenderer(
@@ -914,24 +868,17 @@
return -1;
}
- MapItem *item = _streamRenderMap.Find(streamId);
- if (item == NULL)
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
{
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream == NULL)
- {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(item);
- return 0;
- }
- return incomingStream->SetStartImage(videoFrame);
+ assert(item->second != NULL);
+ return item->second->SetStartImage(videoFrame);
}
@@ -949,24 +896,17 @@
return -1;
}
- MapItem *item = _streamRenderMap.Find(streamId);
- if (item == NULL)
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(streamId);
+ if (item == _streamRenderMap.end())
{
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream == NULL)
- {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(item);
- return 0;
- }
- return incomingStream->SetTimeoutImage(videoFrame, timeout);
+ assert(item->second != NULL);
+ return item->second->SetTimeoutImage(videoFrame, timeout);
}
int32_t ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
@@ -983,25 +923,18 @@
return -1;
}
- MapItem *item = _streamRenderMap.Find(renderId);
- if (item == NULL)
+ IncomingVideoStreamMap::const_iterator item =
+ _streamRenderMap.find(renderId);
+ if (item == _streamRenderMap.end())
{
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return 0;
}
- IncomingVideoStream* incomingStream =
- static_cast<IncomingVideoStream*> (item->GetItem());
- if (incomingStream == NULL)
- {
- // This should never happen
- assert(false);
- _streamRenderMap.Erase(item);
- return 0;
- }
+ assert(item->second != NULL);
- return incomingStream->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
+ return item->second->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
}
} // namespace webrtc
diff --git a/webrtc/modules/video_render/video_render_impl.h b/webrtc/modules/video_render/video_render_impl.h
index c9e69e9..b8f8d67 100644
--- a/webrtc/modules/video_render/video_render_impl.h
+++ b/webrtc/modules/video_render/video_render_impl.h
@@ -11,15 +11,15 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+#include <map>
+
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/system_wrappers/interface/map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
class IncomingVideoStream;
class IVideoRender;
-class MapWrapper;
// Class definitions
class ModuleVideoRenderImpl: public VideoRender
@@ -219,7 +219,8 @@
bool _fullScreen;
IVideoRender* _ptrRenderer;
- MapWrapper& _streamRenderMap;
+ typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
+ IncomingVideoStreamMap _streamRenderMap;
};
} // namespace webrtc