WebRtc_Word32 -> int32_t in utility/
BUG=314
Review URL: https://webrtc-codereview.appspot.com/1307005
git-svn-id: http://webrtc.googlecode.com/svn/trunk@3797 4adac7df-926f-26a2-2b94-8c16560cd09d
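For context, the WebRtc_Word*/WebRtc_UWord* names removed by this change are plain aliases for the <stdint.h> fixed-width integer types, so the substitution is mechanical and does not affect value ranges, sizes, or ABI. A minimal sketch of the mapping the rename relies on (illustrative only, roughly how webrtc/typedefs.h defines the legacy names; not quoted verbatim):

    // Sketch of the legacy integer aliases (roughly as in webrtc/typedefs.h).
    // Each legacy name is a 1:1 alias for a <stdint.h> type, so replacing a
    // WebRtc_* spelling with its underlying standard type is purely textual.
    #include <stdint.h>

    typedef int8_t   WebRtc_Word8;
    typedef int16_t  WebRtc_Word16;
    typedef int32_t  WebRtc_Word32;
    typedef int64_t  WebRtc_Word64;
    typedef uint8_t  WebRtc_UWord8;
    typedef uint16_t WebRtc_UWord16;
    typedef uint32_t WebRtc_UWord32;
    typedef uint64_t WebRtc_UWord64;

Because each alias is exact, existing casts such as (WebRtc_UWord8*) and expressions such as sizeof(WebRtc_UWord8) keep their meaning when rewritten to the standard spellings; only the type names change in the hunks below.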
diff --git a/webrtc/modules/utility/source/coder.cc b/webrtc/modules/utility/source/coder.cc
index f023d22..85fb698 100644
--- a/webrtc/modules/utility/source/coder.cc
+++ b/webrtc/modules/utility/source/coder.cc
@@ -20,7 +20,7 @@
#endif
namespace webrtc {
-AudioCoder::AudioCoder(WebRtc_UWord32 instanceID)
+AudioCoder::AudioCoder(uint32_t instanceID)
: _acm(AudioCodingModule::Create(instanceID)),
_receiveCodec(),
_encodeTimestamp(0),
@@ -38,8 +38,8 @@
AudioCodingModule::Destroy(_acm);
}
-WebRtc_Word32 AudioCoder::SetEncodeCodec(const CodecInst& codecInst,
- ACMAMRPackingFormat amrFormat)
+int32_t AudioCoder::SetEncodeCodec(const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat)
{
if(_acm->RegisterSendCodec((CodecInst&)codecInst) == -1)
{
@@ -48,8 +48,8 @@
return 0;
}
-WebRtc_Word32 AudioCoder::SetDecodeCodec(const CodecInst& codecInst,
- ACMAMRPackingFormat amrFormat)
+int32_t AudioCoder::SetDecodeCodec(const CodecInst& codecInst,
+ ACMAMRPackingFormat amrFormat)
{
if(_acm->RegisterReceiveCodec((CodecInst&)codecInst) == -1)
{
@@ -59,16 +59,16 @@
return 0;
}
-WebRtc_Word32 AudioCoder::Decode(AudioFrame& decodedAudio,
- WebRtc_UWord32 sampFreqHz,
- const WebRtc_Word8* incomingPayload,
- WebRtc_Word32 payloadLength)
+int32_t AudioCoder::Decode(AudioFrame& decodedAudio,
+ uint32_t sampFreqHz,
+ const int8_t* incomingPayload,
+ int32_t payloadLength)
{
if (payloadLength > 0)
{
- const WebRtc_UWord8 payloadType = _receiveCodec.pltype;
+ const uint8_t payloadType = _receiveCodec.pltype;
_decodeTimestamp += _receiveCodec.pacsize;
- if(_acm->IncomingPayload((const WebRtc_UWord8*) incomingPayload,
+ if(_acm->IncomingPayload((const uint8_t*) incomingPayload,
payloadLength,
payloadType,
_decodeTimestamp) == -1)
@@ -76,18 +76,18 @@
return -1;
}
}
- return _acm->PlayoutData10Ms((WebRtc_UWord16)sampFreqHz, &decodedAudio);
+ return _acm->PlayoutData10Ms((uint16_t)sampFreqHz, &decodedAudio);
}
-WebRtc_Word32 AudioCoder::PlayoutData(AudioFrame& decodedAudio,
- WebRtc_UWord16& sampFreqHz)
+int32_t AudioCoder::PlayoutData(AudioFrame& decodedAudio,
+ uint16_t& sampFreqHz)
{
return _acm->PlayoutData10Ms(sampFreqHz, &decodedAudio);
}
-WebRtc_Word32 AudioCoder::Encode(const AudioFrame& audio,
- WebRtc_Word8* encodedData,
- WebRtc_UWord32& encodedLengthInBytes)
+int32_t AudioCoder::Encode(const AudioFrame& audio,
+ int8_t* encodedData,
+ uint32_t& encodedLengthInBytes)
{
// Fake a timestamp in case audio doesn't contain a correct timestamp.
// Make a local copy of the audio frame since audio is const
@@ -112,15 +112,15 @@
return 0;
}
-WebRtc_Word32 AudioCoder::SendData(
+int32_t AudioCoder::SendData(
FrameType /* frameType */,
- WebRtc_UWord8 /* payloadType */,
- WebRtc_UWord32 /* timeStamp */,
- const WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
+ uint8_t /* payloadType */,
+ uint32_t /* timeStamp */,
+ const uint8_t* payloadData,
+ uint16_t payloadSize,
const RTPFragmentationHeader* /* fragmentation*/)
{
- memcpy(_encodedData,payloadData,sizeof(WebRtc_UWord8) * payloadSize);
+ memcpy(_encodedData,payloadData,sizeof(uint8_t) * payloadSize);
_encodedLengthInBytes = payloadSize;
return 0;
}
diff --git a/webrtc/modules/utility/source/coder.h b/webrtc/modules/utility/source/coder.h
index e7cbfb8..9dd2566 100644
--- a/webrtc/modules/utility/source/coder.h
+++ b/webrtc/modules/utility/source/coder.h
@@ -21,46 +21,43 @@
class AudioCoder : public AudioPacketizationCallback
{
public:
- AudioCoder(WebRtc_UWord32 instanceID);
+ AudioCoder(uint32_t instanceID);
~AudioCoder();
- WebRtc_Word32 SetEncodeCodec(
+ int32_t SetEncodeCodec(
const CodecInst& codecInst,
ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
- WebRtc_Word32 SetDecodeCodec(
+ int32_t SetDecodeCodec(
const CodecInst& codecInst,
ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
- WebRtc_Word32 Decode(AudioFrame& decodedAudio, WebRtc_UWord32 sampFreqHz,
- const WebRtc_Word8* incomingPayload,
- WebRtc_Word32 payloadLength);
+ int32_t Decode(AudioFrame& decodedAudio, uint32_t sampFreqHz,
+ const int8_t* incomingPayload, int32_t payloadLength);
- WebRtc_Word32 PlayoutData(AudioFrame& decodedAudio,
- WebRtc_UWord16& sampFreqHz);
+ int32_t PlayoutData(AudioFrame& decodedAudio, uint16_t& sampFreqHz);
- WebRtc_Word32 Encode(const AudioFrame& audio,
- WebRtc_Word8* encodedData,
- WebRtc_UWord32& encodedLengthInBytes);
+ int32_t Encode(const AudioFrame& audio, int8_t* encodedData,
+ uint32_t& encodedLengthInBytes);
protected:
- virtual WebRtc_Word32 SendData(FrameType frameType,
- WebRtc_UWord8 payloadType,
- WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- const RTPFragmentationHeader* fragmentation);
+ virtual int32_t SendData(FrameType frameType,
+ uint8_t payloadType,
+ uint32_t timeStamp,
+ const uint8_t* payloadData,
+ uint16_t payloadSize,
+ const RTPFragmentationHeader* fragmentation);
private:
AudioCodingModule* _acm;
CodecInst _receiveCodec;
- WebRtc_UWord32 _encodeTimestamp;
- WebRtc_Word8* _encodedData;
- WebRtc_UWord32 _encodedLengthInBytes;
+ uint32_t _encodeTimestamp;
+ int8_t* _encodedData;
+ uint32_t _encodedLengthInBytes;
- WebRtc_UWord32 _decodeTimestamp;
+ uint32_t _decodeTimestamp;
};
} // namespace webrtc
diff --git a/webrtc/modules/utility/source/file_player_impl.cc b/webrtc/modules/utility/source/file_player_impl.cc
index 2ca205a..52ebe32 100644
--- a/webrtc/modules/utility/source/file_player_impl.cc
+++ b/webrtc/modules/utility/source/file_player_impl.cc
@@ -25,7 +25,7 @@
#endif
namespace webrtc {
-FilePlayer* FilePlayer::CreateFilePlayer(WebRtc_UWord32 instanceID,
+FilePlayer* FilePlayer::CreateFilePlayer(uint32_t instanceID,
FileFormats fileFormat)
{
switch(fileFormat)
@@ -57,7 +57,7 @@
delete player;
}
-FilePlayerImpl::FilePlayerImpl(const WebRtc_UWord32 instanceID,
+FilePlayerImpl::FilePlayerImpl(const uint32_t instanceID,
const FileFormats fileFormat)
: _instanceID(instanceID),
_fileFormat(fileFormat),
@@ -78,7 +78,7 @@
MediaFile::DestroyMediaFile(&_fileModule);
}
-WebRtc_Word32 FilePlayerImpl::Frequency() const
+int32_t FilePlayerImpl::Frequency() const
{
if(_codec.plfreq == 0)
{
@@ -108,13 +108,13 @@
}
}
-WebRtc_Word32 FilePlayerImpl::AudioCodec(CodecInst& audioCodec) const
+int32_t FilePlayerImpl::AudioCodec(CodecInst& audioCodec) const
{
audioCodec = _codec;
return 0;
}
-WebRtc_Word32 FilePlayerImpl::Get10msAudioFromFile(
+int32_t FilePlayerImpl::Get10msAudioFromFile(
int16_t* outBuffer,
int& lengthInSamples,
int frequencyInHz)
@@ -134,10 +134,10 @@
unresampledAudioFrame.sample_rate_hz_ = _codec.plfreq;
// L16 is un-encoded data. Just pull 10 ms.
- WebRtc_UWord32 lengthInBytes =
+ uint32_t lengthInBytes =
sizeof(unresampledAudioFrame.data_);
if (_fileModule.PlayoutAudioData(
- (WebRtc_Word8*)unresampledAudioFrame.data_,
+ (int8_t*)unresampledAudioFrame.data_,
lengthInBytes) == -1)
{
// End of file reached.
@@ -150,19 +150,19 @@
}
// One sample is two bytes.
unresampledAudioFrame.samples_per_channel_ =
- (WebRtc_UWord16)lengthInBytes >> 1;
+ (uint16_t)lengthInBytes >> 1;
}else {
// Decode will generate 10 ms of audio data. PlayoutAudioData(..)
// expects a full frame. If the frame size is larger than 10 ms,
// PlayoutAudioData(..) data should be called proportionally less often.
- WebRtc_Word16 encodedBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
- WebRtc_UWord32 encodedLengthInBytes = 0;
+ int16_t encodedBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+ uint32_t encodedLengthInBytes = 0;
if(++_numberOf10MsInDecoder >= _numberOf10MsPerFrame)
{
_numberOf10MsInDecoder = 0;
- WebRtc_UWord32 bytesFromFile = sizeof(encodedBuffer);
- if (_fileModule.PlayoutAudioData((WebRtc_Word8*)encodedBuffer,
+ uint32_t bytesFromFile = sizeof(encodedBuffer);
+ if (_fileModule.PlayoutAudioData((int8_t*)encodedBuffer,
bytesFromFile) == -1)
{
// End of file reached.
@@ -171,7 +171,7 @@
encodedLengthInBytes = bytesFromFile;
}
if(_audioDecoder.Decode(unresampledAudioFrame,frequencyInHz,
- (WebRtc_Word8*)encodedBuffer,
+ (int8_t*)encodedBuffer,
encodedLengthInBytes) == -1)
{
return -1;
@@ -187,7 +187,7 @@
// New sampling frequency. Update state.
outLen = frequencyInHz / 100;
- memset(outBuffer, 0, outLen * sizeof(WebRtc_Word16));
+ memset(outBuffer, 0, outLen * sizeof(int16_t));
return 0;
}
_resampler.Push(unresampledAudioFrame.data_,
@@ -202,19 +202,19 @@
{
for (int i = 0;i < outLen; i++)
{
- outBuffer[i] = (WebRtc_Word16)(outBuffer[i] * _scaling);
+ outBuffer[i] = (int16_t)(outBuffer[i] * _scaling);
}
}
_decodedLengthInMS += 10;
return 0;
}
-WebRtc_Word32 FilePlayerImpl::RegisterModuleFileCallback(FileCallback* callback)
+int32_t FilePlayerImpl::RegisterModuleFileCallback(FileCallback* callback)
{
return _fileModule.SetModuleFileCallback(callback);
}
-WebRtc_Word32 FilePlayerImpl::SetAudioScaling(float scaleFactor)
+int32_t FilePlayerImpl::SetAudioScaling(float scaleFactor)
{
if((scaleFactor >= 0)&&(scaleFactor <= 2.0))
{
@@ -226,13 +226,13 @@
return -1;
}
-WebRtc_Word32 FilePlayerImpl::StartPlayingFile(const char* fileName,
- bool loop,
- WebRtc_UWord32 startPosition,
- float volumeScaling,
- WebRtc_UWord32 notification,
- WebRtc_UWord32 stopPosition,
- const CodecInst* codecInst)
+int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
+ bool loop,
+ uint32_t startPosition,
+ float volumeScaling,
+ uint32_t notification,
+ uint32_t stopPosition,
+ const CodecInst* codecInst)
{
if (_fileFormat == kFileFormatPcm16kHzFile ||
_fileFormat == kFileFormatPcm8kHzFile||
@@ -322,12 +322,12 @@
return 0;
}
-WebRtc_Word32 FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
- WebRtc_UWord32 startPosition,
- float volumeScaling,
- WebRtc_UWord32 notification,
- WebRtc_UWord32 stopPosition,
- const CodecInst* codecInst)
+int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
+ uint32_t startPosition,
+ float volumeScaling,
+ uint32_t notification,
+ uint32_t stopPosition,
+ const CodecInst* codecInst)
{
if (_fileFormat == kFileFormatPcm16kHzFile ||
_fileFormat == kFileFormatPcm32kHzFile ||
@@ -415,7 +415,7 @@
return 0;
}
-WebRtc_Word32 FilePlayerImpl::StopPlayingFile()
+int32_t FilePlayerImpl::StopPlayingFile()
{
memset(&_codec, 0, sizeof(CodecInst));
_numberOf10MsPerFrame = 0;
@@ -428,12 +428,12 @@
return _fileModule.IsPlaying();
}
-WebRtc_Word32 FilePlayerImpl::GetPlayoutPosition(WebRtc_UWord32& durationMs)
+int32_t FilePlayerImpl::GetPlayoutPosition(uint32_t& durationMs)
{
return _fileModule.PlayoutPositionMs(durationMs);
}
-WebRtc_Word32 FilePlayerImpl::SetUpAudioDecoder()
+int32_t FilePlayerImpl::SetUpAudioDecoder()
{
if ((_fileModule.codec_info(_codec) == -1))
{
@@ -462,7 +462,7 @@
}
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-VideoFilePlayerImpl::VideoFilePlayerImpl(WebRtc_UWord32 instanceID,
+VideoFilePlayerImpl::VideoFilePlayerImpl(uint32_t instanceID,
FileFormats fileFormat)
: FilePlayerImpl(instanceID,fileFormat),
_videoDecoder(*new VideoCoder(instanceID)),
@@ -488,7 +488,7 @@
delete &_encodedData;
}
-WebRtc_Word32 VideoFilePlayerImpl::StartPlayingVideoFile(
+int32_t VideoFilePlayerImpl::StartPlayingVideoFile(
const char* fileName,
bool loop,
bool videoOnly)
@@ -525,7 +525,7 @@
return 0;
}
-WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile()
+int32_t VideoFilePlayerImpl::StopPlayingFile()
{
CriticalSectionScoped lock( _critSec);
@@ -535,13 +535,13 @@
return FilePlayerImpl::StopPlayingFile();
}
-WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame,
- WebRtc_UWord32 outWidth,
- WebRtc_UWord32 outHeight)
+int32_t VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame,
+ uint32_t outWidth,
+ uint32_t outHeight)
{
CriticalSectionScoped lock( _critSec);
- WebRtc_Word32 retVal = GetVideoFromFile(videoFrame);
+ int32_t retVal = GetVideoFromFile(videoFrame);
if(retVal != 0)
{
return retVal;
@@ -554,7 +554,7 @@
return retVal;
}
-WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame)
+int32_t VideoFilePlayerImpl::GetVideoFromFile(I420VideoFrame& videoFrame)
{
CriticalSectionScoped lock( _critSec);
// No new video data read from file.
@@ -563,7 +563,7 @@
videoFrame.ResetSize();
return -1;
}
- WebRtc_Word32 retVal = 0;
+ int32_t retVal = 0;
if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
{
int size_y = video_codec_info_.width * video_codec_info_.height;
@@ -588,7 +588,7 @@
retVal = _videoDecoder.Decode(videoFrame, _encodedData);
}
- WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp();
+ int64_t renderTimeMs = TickTime::MillisecondTimestamp();
videoFrame.set_render_time_ms(renderTimeMs);
// Indicate that the current frame in the encoded buffer is old/has
@@ -601,7 +601,7 @@
return retVal;
}
-WebRtc_Word32 VideoFilePlayerImpl::video_codec_info(
+int32_t VideoFilePlayerImpl::video_codec_info(
VideoCodec& videoCodec) const
{
if(video_codec_info_.plName[0] == 0)
@@ -612,7 +612,7 @@
return 0;
}
-WebRtc_Word32 VideoFilePlayerImpl::TimeUntilNextVideoFrame()
+int32_t VideoFilePlayerImpl::TimeUntilNextVideoFrame()
{
if(_fileFormat != kFileFormatAviFile)
{
@@ -630,9 +630,9 @@
if(_fileFormat == kFileFormatAviFile)
{
// Get next video frame
- WebRtc_UWord32 encodedBufferLengthInBytes = _encodedData.bufferSize;
+ uint32_t encodedBufferLengthInBytes = _encodedData.bufferSize;
if(_fileModule.PlayoutAVIVideoData(
- reinterpret_cast< WebRtc_Word8*>(_encodedData.payloadData),
+ reinterpret_cast< int8_t*>(_encodedData.payloadData),
encodedBufferLengthInBytes) != 0)
{
WEBRTC_TRACE(
@@ -659,7 +659,7 @@
// Frame rate is in frames per seconds. Frame length is
// calculated as an integer division which means it may
// be rounded down. Compensate for this every second.
- WebRtc_UWord32 rest = 1000%_frameLengthMS;
+ uint32_t rest = 1000%_frameLengthMS;
_accumulatedRenderTimeMs += rest;
}
_accumulatedRenderTimeMs += _frameLengthMS;
@@ -667,7 +667,7 @@
}
}
- WebRtc_Word64 timeToNextFrame;
+ int64_t timeToNextFrame;
if(_videoOnly)
{
timeToNextFrame = _accumulatedRenderTimeMs -
@@ -686,10 +686,10 @@
// Wraparound or audio stream has gone to far ahead of the video stream.
return -1;
}
- return static_cast<WebRtc_Word32>(timeToNextFrame);
+ return static_cast<int32_t>(timeToNextFrame);
}
-WebRtc_Word32 VideoFilePlayerImpl::SetUpVideoDecoder()
+int32_t VideoFilePlayerImpl::SetUpVideoDecoder()
{
if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
{
@@ -702,7 +702,7 @@
return -1;
}
- WebRtc_Word32 useNumberOfCores = 1;
+ int32_t useNumberOfCores = 1;
if(_videoDecoder.SetDecodeCodec(video_codec_info_, useNumberOfCores) != 0)
{
WEBRTC_TRACE(
@@ -718,7 +718,7 @@
// Size of unencoded data (I420) should be the largest possible frame size
// in a file.
- const WebRtc_UWord32 KReadBufferSize = 3 * video_codec_info_.width *
+ const uint32_t KReadBufferSize = 3 * video_codec_info_.width *
video_codec_info_.height / 2;
_encodedData.VerifyAndAllocate(KReadBufferSize);
_encodedData.encodedHeight = video_codec_info_.height;
diff --git a/webrtc/modules/utility/source/file_player_impl.h b/webrtc/modules/utility/source/file_player_impl.h
index c188e23..ebf0da1 100644
--- a/webrtc/modules/utility/source/file_player_impl.h
+++ b/webrtc/modules/utility/source/file_player_impl.h
@@ -29,51 +29,51 @@
class FilePlayerImpl : public FilePlayer
{
public:
- FilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ FilePlayerImpl(uint32_t instanceID, FileFormats fileFormat);
~FilePlayerImpl();
virtual int Get10msAudioFromFile(
int16_t* outBuffer,
int& lengthInSamples,
int frequencyInHz);
- virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
- virtual WebRtc_Word32 StartPlayingFile(
+ virtual int32_t RegisterModuleFileCallback(FileCallback* callback);
+ virtual int32_t StartPlayingFile(
const char* fileName,
bool loop,
- WebRtc_UWord32 startPosition,
+ uint32_t startPosition,
float volumeScaling,
- WebRtc_UWord32 notification,
- WebRtc_UWord32 stopPosition = 0,
+ uint32_t notification,
+ uint32_t stopPosition = 0,
const CodecInst* codecInst = NULL);
- virtual WebRtc_Word32 StartPlayingFile(
+ virtual int32_t StartPlayingFile(
InStream& sourceStream,
- WebRtc_UWord32 startPosition,
+ uint32_t startPosition,
float volumeScaling,
- WebRtc_UWord32 notification,
- WebRtc_UWord32 stopPosition = 0,
+ uint32_t notification,
+ uint32_t stopPosition = 0,
const CodecInst* codecInst = NULL);
- virtual WebRtc_Word32 StopPlayingFile();
+ virtual int32_t StopPlayingFile();
virtual bool IsPlayingFile() const;
- virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs);
- virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const;
- virtual WebRtc_Word32 Frequency() const;
- virtual WebRtc_Word32 SetAudioScaling(float scaleFactor);
+ virtual int32_t GetPlayoutPosition(uint32_t& durationMs);
+ virtual int32_t AudioCodec(CodecInst& audioCodec) const;
+ virtual int32_t Frequency() const;
+ virtual int32_t SetAudioScaling(float scaleFactor);
protected:
- WebRtc_Word32 SetUpAudioDecoder();
+ int32_t SetUpAudioDecoder();
- WebRtc_UWord32 _instanceID;
+ uint32_t _instanceID;
const FileFormats _fileFormat;
MediaFile& _fileModule;
- WebRtc_UWord32 _decodedLengthInMS;
+ uint32_t _decodedLengthInMS;
private:
AudioCoder _audioDecoder;
CodecInst _codec;
- WebRtc_Word32 _numberOf10MsPerFrame;
- WebRtc_Word32 _numberOf10MsInDecoder;
+ int32_t _numberOf10MsPerFrame;
+ int32_t _numberOf10MsInDecoder;
Resampler _resampler;
float _scaling;
@@ -83,37 +83,37 @@
class VideoFilePlayerImpl: public FilePlayerImpl
{
public:
- VideoFilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ VideoFilePlayerImpl(uint32_t instanceID, FileFormats fileFormat);
~VideoFilePlayerImpl();
// FilePlayer functions.
- virtual WebRtc_Word32 TimeUntilNextVideoFrame();
- virtual WebRtc_Word32 StartPlayingVideoFile(const char* fileName,
- bool loop,
- bool videoOnly);
- virtual WebRtc_Word32 StopPlayingFile();
- virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const;
- virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame);
- virtual WebRtc_Word32 GetVideoFromFile(I420VideoFrame& videoFrame,
- const WebRtc_UWord32 outWidth,
- const WebRtc_UWord32 outHeight);
+ virtual int32_t TimeUntilNextVideoFrame();
+ virtual int32_t StartPlayingVideoFile(const char* fileName,
+ bool loop,
+ bool videoOnly);
+ virtual int32_t StopPlayingFile();
+ virtual int32_t video_codec_info(VideoCodec& videoCodec) const;
+ virtual int32_t GetVideoFromFile(I420VideoFrame& videoFrame);
+ virtual int32_t GetVideoFromFile(I420VideoFrame& videoFrame,
+ const uint32_t outWidth,
+ const uint32_t outHeight);
private:
- WebRtc_Word32 SetUpVideoDecoder();
+ int32_t SetUpVideoDecoder();
VideoCoder& _videoDecoder;
VideoCodec video_codec_info_;
- WebRtc_Word32 _decodedVideoFrames;
+ int32_t _decodedVideoFrames;
EncodedVideoData& _encodedData;
FrameScaler& _frameScaler;
CriticalSectionWrapper* _critSec;
TickTime _startTime;
- WebRtc_Word64 _accumulatedRenderTimeMs;
- WebRtc_UWord32 _frameLengthMS;
+ int64_t _accumulatedRenderTimeMs;
+ uint32_t _frameLengthMS;
- WebRtc_Word32 _numberOfFramesRead;
+ int32_t _numberOfFramesRead;
bool _videoOnly;
};
#endif //WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/webrtc/modules/utility/source/file_recorder_impl.cc b/webrtc/modules/utility/source/file_recorder_impl.cc
index 840c79f..fefa4dc 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -29,7 +29,7 @@
#endif
namespace webrtc {
-FileRecorder* FileRecorder::CreateFileRecorder(WebRtc_UWord32 instanceID,
+FileRecorder* FileRecorder::CreateFileRecorder(uint32_t instanceID,
FileFormats fileFormat)
{
switch(fileFormat)
@@ -60,7 +60,7 @@
delete recorder;
}
-FileRecorderImpl::FileRecorderImpl(WebRtc_UWord32 instanceID,
+FileRecorderImpl::FileRecorderImpl(uint32_t instanceID,
FileFormats fileFormat)
: _instanceID(instanceID),
_fileFormat(fileFormat),
@@ -83,7 +83,7 @@
return _fileFormat;
}
-WebRtc_Word32 FileRecorderImpl::RegisterModuleFileCallback(
+int32_t FileRecorderImpl::RegisterModuleFileCallback(
FileCallback* callback)
{
if(_moduleFile == NULL)
@@ -93,10 +93,10 @@
return _moduleFile->SetModuleFileCallback(callback);
}
-WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+int32_t FileRecorderImpl::StartRecordingAudioFile(
const char* fileName,
const CodecInst& codecInst,
- WebRtc_UWord32 notificationTimeMs,
+ uint32_t notificationTimeMs,
ACMAMRPackingFormat amrFormat)
{
if(_moduleFile == NULL)
@@ -106,7 +106,7 @@
codec_info_ = codecInst;
_amrFormat = amrFormat;
- WebRtc_Word32 retVal = 0;
+ int32_t retVal = 0;
if(_fileFormat != kFileFormatAviFile)
{
// AVI files should be started using StartRecordingVideoFile(..) all
@@ -138,16 +138,16 @@
return retVal;
}
-WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+int32_t FileRecorderImpl::StartRecordingAudioFile(
OutStream& destStream,
const CodecInst& codecInst,
- WebRtc_UWord32 notificationTimeMs,
+ uint32_t notificationTimeMs,
ACMAMRPackingFormat amrFormat)
{
codec_info_ = codecInst;
_amrFormat = amrFormat;
- WebRtc_Word32 retVal = _moduleFile->StartRecordingAudioStream(
+ int32_t retVal = _moduleFile->StartRecordingAudioStream(
destStream,
_fileFormat,
codecInst,
@@ -174,7 +174,7 @@
return retVal;
}
-WebRtc_Word32 FileRecorderImpl::StopRecording()
+int32_t FileRecorderImpl::StopRecording()
{
memset(&codec_info_, 0, sizeof(CodecInst));
return _moduleFile->StopRecording();
@@ -185,7 +185,7 @@
return _moduleFile->IsRecording();
}
-WebRtc_Word32 FileRecorderImpl::RecordAudioToFile(
+int32_t FileRecorderImpl::RecordAudioToFile(
const AudioFrame& incomingAudioFrame,
const TickTime* playoutTS)
{
@@ -209,7 +209,7 @@
tempAudioFrame.sample_rate_hz_ = incomingAudioFrame.sample_rate_hz_;
tempAudioFrame.samples_per_channel_ =
incomingAudioFrame.samples_per_channel_;
- for (WebRtc_UWord16 i = 0;
+ for (uint16_t i = 0;
i < (incomingAudioFrame.samples_per_channel_); i++)
{
// Sample value is the average of left and right buffer rounded to
@@ -227,7 +227,7 @@
tempAudioFrame.sample_rate_hz_ = incomingAudioFrame.sample_rate_hz_;
tempAudioFrame.samples_per_channel_ =
incomingAudioFrame.samples_per_channel_;
- for (WebRtc_UWord16 i = 0;
+ for (uint16_t i = 0;
i < (incomingAudioFrame.samples_per_channel_); i++)
{
// Duplicate sample to both channels
@@ -250,7 +250,7 @@
// NOTE: stereo recording is only supported for WAV files.
// TODO (hellner): WAV expect PCM in little endian byte order. Not
// "encoding" with PCM coder should be a problem for big endian systems.
- WebRtc_UWord32 encodedLenInBytes = 0;
+ uint32_t encodedLenInBytes = 0;
if (_fileFormat == kFileFormatPreencodedFile ||
STR_CASE_CMP(codec_info_.plname, "L16") != 0)
{
@@ -277,7 +277,7 @@
_audioResampler.Push(ptrAudioFrame->data_,
ptrAudioFrame->samples_per_channel_ *
ptrAudioFrame->num_channels_,
- (WebRtc_Word16*)_audioBuffer,
+ (int16_t*)_audioBuffer,
MAX_AUDIO_BUFFER_IN_BYTES, outLen);
} else {
_audioResampler.ResetIfNeeded(ptrAudioFrame->sample_rate_hz_,
@@ -285,10 +285,10 @@
kResamplerSynchronous);
_audioResampler.Push(ptrAudioFrame->data_,
ptrAudioFrame->samples_per_channel_,
- (WebRtc_Word16*)_audioBuffer,
+ (int16_t*)_audioBuffer,
MAX_AUDIO_BUFFER_IN_BYTES, outLen);
}
- encodedLenInBytes = outLen * sizeof(WebRtc_Word16);
+ encodedLenInBytes = outLen * sizeof(int16_t);
}
// Codec may not be operating at a frame rate of 10 ms. Whenever enough
@@ -296,11 +296,11 @@
// will be available. Wait until then.
if (encodedLenInBytes)
{
- WebRtc_UWord16 msOfData =
+ uint16_t msOfData =
ptrAudioFrame->samples_per_channel_ /
- WebRtc_UWord16(ptrAudioFrame->sample_rate_hz_ / 1000);
+ uint16_t(ptrAudioFrame->sample_rate_hz_ / 1000);
if (WriteEncodedAudioData(_audioBuffer,
- (WebRtc_UWord16)encodedLenInBytes,
+ (uint16_t)encodedLenInBytes,
msOfData, playoutTS) == -1)
{
return -1;
@@ -309,7 +309,7 @@
return 0;
}
-WebRtc_Word32 FileRecorderImpl::SetUpAudioEncoder()
+int32_t FileRecorderImpl::SetUpAudioEncoder()
{
if (_fileFormat == kFileFormatPreencodedFile ||
STR_CASE_CMP(codec_info_.plname, "L16") != 0)
@@ -328,7 +328,7 @@
return 0;
}
-WebRtc_Word32 FileRecorderImpl::codec_info(CodecInst& codecInst) const
+int32_t FileRecorderImpl::codec_info(CodecInst& codecInst) const
{
if(codec_info_.plfreq == 0)
{
@@ -338,10 +338,10 @@
return 0;
}
-WebRtc_Word32 FileRecorderImpl::WriteEncodedAudioData(
- const WebRtc_Word8* audioBuffer,
- WebRtc_UWord16 bufferLength,
- WebRtc_UWord16 /*millisecondsOfData*/,
+int32_t FileRecorderImpl::WriteEncodedAudioData(
+ const int8_t* audioBuffer,
+ uint16_t bufferLength,
+ uint16_t /*millisecondsOfData*/,
const TickTime* /*playoutTS*/)
{
return _moduleFile->IncomingAudioData(audioBuffer, bufferLength);
@@ -352,9 +352,9 @@
class AudioFrameFileInfo
{
public:
- AudioFrameFileInfo(const WebRtc_Word8* audioData,
- const WebRtc_UWord16 audioSize,
- const WebRtc_UWord16 audioMS,
+ AudioFrameFileInfo(const int8_t* audioData,
+ const uint16_t audioSize,
+ const uint16_t audioMS,
const TickTime& playoutTS)
: _audioData(), _audioSize(audioSize), _audioMS(audioMS),
_playoutTS(playoutTS)
@@ -368,13 +368,13 @@
memcpy(_audioData, audioData, audioSize);
};
// TODO (hellner): either turn into a struct or provide get/set functions.
- WebRtc_Word8 _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
- WebRtc_UWord16 _audioSize;
- WebRtc_UWord16 _audioMS;
+ int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+ uint16_t _audioSize;
+ uint16_t _audioMS;
TickTime _playoutTS;
};
-AviRecorder::AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat)
+AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
: FileRecorderImpl(instanceID, fileFormat),
_videoOnly(false),
_thread( 0),
@@ -403,7 +403,7 @@
delete _critSec;
}
-WebRtc_Word32 AviRecorder::StartRecordingVideoFile(
+int32_t AviRecorder::StartRecordingVideoFile(
const char* fileName,
const CodecInst& audioCodecInst,
const VideoCodec& videoCodecInst,
@@ -446,7 +446,7 @@
return 0;
}
-WebRtc_Word32 AviRecorder::StopRecording()
+int32_t AviRecorder::StopRecording()
{
_timeEvent.StopTimer();
@@ -454,12 +454,12 @@
return FileRecorderImpl::StopRecording();
}
-WebRtc_Word32 AviRecorder::CalcI420FrameSize( ) const
+int32_t AviRecorder::CalcI420FrameSize( ) const
{
return 3 * _videoCodecInst.width * _videoCodecInst.height / 2;
}
-WebRtc_Word32 AviRecorder::SetUpVideoEncoder()
+int32_t AviRecorder::SetUpVideoEncoder()
{
// Size of unencoded data (I420) should be the largest possible frame size
// in a file.
@@ -469,7 +469,7 @@
_videoCodecInst.plType = _videoEncoder->DefaultPayloadType(
_videoCodecInst.plName);
- WebRtc_Word32 useNumberOfCores = 1;
+ int32_t useNumberOfCores = 1;
// Set the max payload size to 16000. This means that the codec will try to
// create slices that will fit in 16000 kByte packets. However, the
// Encode() call will still generate one full frame.
@@ -481,7 +481,7 @@
return 0;
}
-WebRtc_Word32 AviRecorder::RecordVideoToFile(const I420VideoFrame& videoFrame)
+int32_t AviRecorder::RecordVideoToFile(const I420VideoFrame& videoFrame)
{
CriticalSectionScoped lock(_critSec);
if(!IsRecording() || videoFrame.IsZeroSize())
@@ -489,7 +489,7 @@
return -1;
}
// The frame is written to file in AviRecorder::Process().
- WebRtc_Word32 retVal = _videoFramesQueue->AddFrame(videoFrame);
+ int32_t retVal = _videoFramesQueue->AddFrame(videoFrame);
if(retVal != 0)
{
StopRecording();
@@ -540,7 +540,7 @@
return static_cast<AviRecorder*>( threadObj)->Process();
}
-WebRtc_Word32 AviRecorder::ProcessAudio()
+int32_t AviRecorder::ProcessAudio()
{
if (_writtenVideoFramesCounter == 0)
{
@@ -552,9 +552,9 @@
{
// Syncronize audio to the current frame to process by throwing away
// audio samples with older timestamp than the video frame.
- WebRtc_UWord32 numberOfAudioElements =
+ uint32_t numberOfAudioElements =
_audioFramesToWrite.GetSize();
- for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+ for (uint32_t i = 0; i < numberOfAudioElements; ++i)
{
AudioFrameFileInfo* frameInfo =
(AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
@@ -575,9 +575,9 @@
}
}
// Write all audio up to current timestamp.
- WebRtc_Word32 error = 0;
- WebRtc_UWord32 numberOfAudioElements = _audioFramesToWrite.GetSize();
- for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+ int32_t error = 0;
+ uint32_t numberOfAudioElements = _audioFramesToWrite.GetSize();
+ for (uint32_t i = 0; i < numberOfAudioElements; ++i)
{
AudioFrameFileInfo* frameInfo =
(AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
@@ -626,7 +626,7 @@
{
return true;
}
- WebRtc_Word32 error = 0;
+ int32_t error = 0;
if(!_videoOnly)
{
if(!_firstAudioFrameReceived)
@@ -646,7 +646,7 @@
"AviRecorder::Process() error writing to file.");
break;
} else {
- WebRtc_UWord32 frameLengthMS = 1000 /
+ uint32_t frameLengthMS = 1000 /
_videoCodecInst.maxFramerate;
_writtenVideoFramesCounter++;
_writtenVideoMS += frameLengthMS;
@@ -656,7 +656,7 @@
// Frame rate is in frames per seconds. Frame length is
// calculated as an integer division which means it may
// be rounded down. Compensate for this every second.
- WebRtc_UWord32 rest = 1000 % frameLengthMS;
+ uint32_t rest = 1000 % frameLengthMS;
_writtenVideoMS += rest;
}
}
@@ -667,10 +667,10 @@
// drift. Once a full frame worth of drift has happened, skip writing
// one frame. Note that frame rate is in frames per second so the
// drift is completely compensated for.
- WebRtc_UWord32 frameLengthMS = 1000/_videoCodecInst.maxFramerate;
- WebRtc_UWord32 restMS = 1000 % frameLengthMS;
- WebRtc_UWord32 frameSkip = (_videoCodecInst.maxFramerate *
- frameLengthMS) / restMS;
+ uint32_t frameLengthMS = 1000/_videoCodecInst.maxFramerate;
+ uint32_t restMS = 1000 % frameLengthMS;
+ uint32_t frameSkip = (_videoCodecInst.maxFramerate *
+ frameLengthMS) / restMS;
_writtenVideoFramesCounter++;
if(_writtenVideoFramesCounter % frameSkip == 0)
@@ -691,7 +691,7 @@
return error == 0;
}
-WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
+int32_t AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
{
if (!IsRecording() || videoFrame.IsZeroSize())
{
@@ -731,7 +731,7 @@
if(_videoEncodedData.payloadSize > 0)
{
if(_moduleFile->IncomingAVIVideoData(
- (WebRtc_Word8*)(_videoEncodedData.payloadData),
+ (int8_t*)(_videoEncodedData.payloadData),
_videoEncodedData.payloadSize))
{
WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
@@ -751,10 +751,10 @@
// Store audio frame in the _audioFramesToWrite buffer. The writing to file
// happens in AviRecorder::Process().
-WebRtc_Word32 AviRecorder::WriteEncodedAudioData(
- const WebRtc_Word8* audioBuffer,
- WebRtc_UWord16 bufferLength,
- WebRtc_UWord16 millisecondsOfData,
+int32_t AviRecorder::WriteEncodedAudioData(
+ const int8_t* audioBuffer,
+ uint16_t bufferLength,
+ uint16_t millisecondsOfData,
const TickTime* playoutTS)
{
if (!IsRecording())
diff --git a/webrtc/modules/utility/source/file_recorder_impl.h b/webrtc/modules/utility/source/file_recorder_impl.h
index 60d3b5c..f8921d0 100644
--- a/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/webrtc/modules/utility/source/file_recorder_impl.h
@@ -43,29 +43,29 @@
class FileRecorderImpl : public FileRecorder
{
public:
- FileRecorderImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ FileRecorderImpl(uint32_t instanceID, FileFormats fileFormat);
virtual ~FileRecorderImpl();
// FileRecorder functions.
- virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+ virtual int32_t RegisterModuleFileCallback(FileCallback* callback);
virtual FileFormats RecordingFileFormat() const;
- virtual WebRtc_Word32 StartRecordingAudioFile(
+ virtual int32_t StartRecordingAudioFile(
const char* fileName,
const CodecInst& codecInst,
- WebRtc_UWord32 notificationTimeMs,
+ uint32_t notificationTimeMs,
ACMAMRPackingFormat amrFormat = AMRFileStorage);
- virtual WebRtc_Word32 StartRecordingAudioFile(
+ virtual int32_t StartRecordingAudioFile(
OutStream& destStream,
const CodecInst& codecInst,
- WebRtc_UWord32 notificationTimeMs,
+ uint32_t notificationTimeMs,
ACMAMRPackingFormat amrFormat = AMRFileStorage);
- virtual WebRtc_Word32 StopRecording();
+ virtual int32_t StopRecording();
virtual bool IsRecording() const;
- virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const;
- virtual WebRtc_Word32 RecordAudioToFile(
+ virtual int32_t codec_info(CodecInst& codecInst) const;
+ virtual int32_t RecordAudioToFile(
const AudioFrame& frame,
const TickTime* playoutTS = NULL);
- virtual WebRtc_Word32 StartRecordingVideoFile(
+ virtual int32_t StartRecordingVideoFile(
const char* fileName,
const CodecInst& audioCodecInst,
const VideoCodec& videoCodecInst,
@@ -74,21 +74,21 @@
{
return -1;
}
- virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame)
+ virtual int32_t RecordVideoToFile(const I420VideoFrame& videoFrame)
{
return -1;
}
protected:
- virtual WebRtc_Word32 WriteEncodedAudioData(
- const WebRtc_Word8* audioBuffer,
- WebRtc_UWord16 bufferLength,
- WebRtc_UWord16 millisecondsOfData,
+ virtual int32_t WriteEncodedAudioData(
+ const int8_t* audioBuffer,
+ uint16_t bufferLength,
+ uint16_t millisecondsOfData,
const TickTime* playoutTS);
- WebRtc_Word32 SetUpAudioEncoder();
+ int32_t SetUpAudioEncoder();
- WebRtc_UWord32 _instanceID;
+ uint32_t _instanceID;
FileFormats _fileFormat;
MediaFile* _moduleFile;
@@ -96,7 +96,7 @@
CodecInst codec_info_;
ACMAMRPackingFormat _amrFormat;
- WebRtc_Word8 _audioBuffer[MAX_AUDIO_BUFFER_IN_BYTES];
+ int8_t _audioBuffer[MAX_AUDIO_BUFFER_IN_BYTES];
AudioCoder _audioEncoder;
Resampler _audioResampler;
};
@@ -106,24 +106,24 @@
class AviRecorder : public FileRecorderImpl
{
public:
- AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+ AviRecorder(uint32_t instanceID, FileFormats fileFormat);
virtual ~AviRecorder();
// FileRecorder functions.
- virtual WebRtc_Word32 StartRecordingVideoFile(
+ virtual int32_t StartRecordingVideoFile(
const char* fileName,
const CodecInst& audioCodecInst,
const VideoCodec& videoCodecInst,
ACMAMRPackingFormat amrFormat = AMRFileStorage,
bool videoOnly = false);
- virtual WebRtc_Word32 StopRecording();
- virtual WebRtc_Word32 RecordVideoToFile(const I420VideoFrame& videoFrame);
+ virtual int32_t StopRecording();
+ virtual int32_t RecordVideoToFile(const I420VideoFrame& videoFrame);
protected:
- virtual WebRtc_Word32 WriteEncodedAudioData(
- const WebRtc_Word8* audioBuffer,
- WebRtc_UWord16 bufferLength,
- WebRtc_UWord16 millisecondsOfData,
+ virtual int32_t WriteEncodedAudioData(
+ const int8_t* audioBuffer,
+ uint16_t bufferLength,
+ uint16_t millisecondsOfData,
const TickTime* playoutTS);
private:
static bool Run(ThreadObj threadObj);
@@ -132,11 +132,11 @@
bool StartThread();
bool StopThread();
- WebRtc_Word32 EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame);
- WebRtc_Word32 ProcessAudio();
+ int32_t EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame);
+ int32_t ProcessAudio();
- WebRtc_Word32 CalcI420FrameSize() const;
- WebRtc_Word32 SetUpVideoEncoder();
+ int32_t CalcI420FrameSize() const;
+ int32_t SetUpVideoEncoder();
VideoCodec _videoCodecInst;
bool _videoOnly;
@@ -148,15 +148,15 @@
FrameScaler* _frameScaler;
VideoCoder* _videoEncoder;
- WebRtc_Word32 _videoMaxPayloadSize;
+ int32_t _videoMaxPayloadSize;
EncodedVideoData _videoEncodedData;
ThreadWrapper* _thread;
EventWrapper& _timeEvent;
CriticalSectionWrapper* _critSec;
- WebRtc_Word64 _writtenVideoFramesCounter;
- WebRtc_Word64 _writtenAudioMS;
- WebRtc_Word64 _writtenVideoMS;
+ int64_t _writtenVideoFramesCounter;
+ int64_t _writtenAudioMS;
+ int64_t _writtenVideoMS;
};
#endif // WEBRTC_MODULE_UTILITY_VIDEO
} // namespace webrtc
diff --git a/webrtc/modules/utility/source/process_thread_impl.cc b/webrtc/modules/utility/source/process_thread_impl.cc
index bdbd5ca..61b3e33 100644
--- a/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/webrtc/modules/utility/source/process_thread_impl.cc
@@ -42,7 +42,7 @@
WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
-WebRtc_Word32 ProcessThreadImpl::Start()
+int32_t ProcessThreadImpl::Start()
{
CriticalSectionScoped lock(_critSectModules);
if(_thread)
@@ -52,7 +52,7 @@
_thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
"ProcessThread");
unsigned int id;
- WebRtc_Word32 retVal = _thread->Start(id);
+ int32_t retVal = _thread->Start(id);
if(retVal >= 0)
{
return 0;
@@ -62,7 +62,7 @@
return -1;
}
-WebRtc_Word32 ProcessThreadImpl::Stop()
+int32_t ProcessThreadImpl::Stop()
{
_critSectModules->Enter();
if(_thread)
@@ -87,13 +87,13 @@
return 0;
}
-WebRtc_Word32 ProcessThreadImpl::RegisterModule(const Module* module)
+int32_t ProcessThreadImpl::RegisterModule(const Module* module)
{
CriticalSectionScoped lock(_critSectModules);
// Only allow module to be registered once.
ListItem* item = _modules.First();
- for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
{
if(module == item->GetItem())
{
@@ -113,12 +113,12 @@
return 0;
}
-WebRtc_Word32 ProcessThreadImpl::DeRegisterModule(const Module* module)
+int32_t ProcessThreadImpl::DeRegisterModule(const Module* module)
{
CriticalSectionScoped lock(_critSectModules);
ListItem* item = _modules.First();
- for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
{
if(module == item->GetItem())
{
@@ -142,13 +142,13 @@
{
// Wait for the module that should be called next, but don't block thread
// longer than 100 ms.
- WebRtc_Word32 minTimeToNext = 100;
+ int32_t minTimeToNext = 100;
{
CriticalSectionScoped lock(_critSectModules);
ListItem* item = _modules.First();
- for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
{
- WebRtc_Word32 timeToNext =
+ int32_t timeToNext =
static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
if(minTimeToNext > timeToNext)
{
@@ -173,9 +173,9 @@
{
CriticalSectionScoped lock(_critSectModules);
ListItem* item = _modules.First();
- for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+ for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
{
- WebRtc_Word32 timeToNext =
+ int32_t timeToNext =
static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
if(timeToNext < 1)
{
diff --git a/webrtc/modules/utility/source/process_thread_impl.h b/webrtc/modules/utility/source/process_thread_impl.h
index 79b1272..7edb565 100644
--- a/webrtc/modules/utility/source/process_thread_impl.h
+++ b/webrtc/modules/utility/source/process_thread_impl.h
@@ -25,11 +25,11 @@
ProcessThreadImpl();
virtual ~ProcessThreadImpl();
- virtual WebRtc_Word32 Start();
- virtual WebRtc_Word32 Stop();
+ virtual int32_t Start();
+ virtual int32_t Stop();
- virtual WebRtc_Word32 RegisterModule(const Module* module);
- virtual WebRtc_Word32 DeRegisterModule(const Module* module);
+ virtual int32_t RegisterModule(const Module* module);
+ virtual int32_t DeRegisterModule(const Module* module);
protected:
static bool Run(void* obj);
diff --git a/webrtc/modules/utility/source/rtp_dump_impl.cc b/webrtc/modules/utility/source/rtp_dump_impl.cc
index 69a52ec..74de4ac 100644
--- a/webrtc/modules/utility/source/rtp_dump_impl.cc
+++ b/webrtc/modules/utility/source/rtp_dump_impl.cc
@@ -40,7 +40,7 @@
namespace webrtc {
const char RTPFILE_VERSION[] = "1.0";
-const WebRtc_UWord32 MAX_UWORD32 = 0xffffffff;
+const uint32_t MAX_UWORD32 = 0xffffffff;
// This stucture is specified in the rtpdump documentation.
// This struct corresponds to RD_packet_t in
@@ -49,11 +49,11 @@
{
// Length of packet, including this header (may be smaller than plen if not
// whole packet recorded).
- WebRtc_UWord16 length;
+ uint16_t length;
// Actual header+payload length for RTP, 0 for RTCP.
- WebRtc_UWord16 plen;
+ uint16_t plen;
// Milliseconds since the start of recording.
- WebRtc_UWord32 offset;
+ uint32_t offset;
} rtpDumpPktHdr_t;
RtpDump* RtpDump::CreateRtpDump()
@@ -87,7 +87,7 @@
WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
-WebRtc_Word32 RtpDumpImpl::Start(const char* fileNameUTF8)
+int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
{
if (fileNameUTF8 == NULL)
@@ -136,7 +136,7 @@
return 0;
}
-WebRtc_Word32 RtpDumpImpl::Stop()
+int32_t RtpDumpImpl::Stop()
{
CriticalSectionScoped lock(_critSect);
_file.Flush();
@@ -150,8 +150,7 @@
return _file.Open();
}
-WebRtc_Word32 RtpDumpImpl::DumpPacket(const WebRtc_UWord8* packet,
- WebRtc_UWord16 packetLength)
+int32_t RtpDumpImpl::DumpPacket(const uint8_t* packet, uint16_t packetLength)
{
CriticalSectionScoped lock(_critSect);
if (!IsActive())
@@ -174,7 +173,7 @@
bool isRTCP = RTCP(packet);
rtpDumpPktHdr_t hdr;
- WebRtc_UWord32 offset;
+ uint32_t offset;
// Offset is relative to when recording was started.
offset = GetTimeInMS();
@@ -187,14 +186,14 @@
}
hdr.offset = RtpDumpHtonl(offset);
- hdr.length = RtpDumpHtons((WebRtc_UWord16)(packetLength + sizeof(hdr)));
+ hdr.length = RtpDumpHtons((uint16_t)(packetLength + sizeof(hdr)));
if (isRTCP)
{
hdr.plen = 0;
}
else
{
- hdr.plen = RtpDumpHtons((WebRtc_UWord16)packetLength);
+ hdr.plen = RtpDumpHtons((uint16_t)packetLength);
}
if (!_file.Write(&hdr, sizeof(hdr)))
@@ -213,9 +212,9 @@
return 0;
}
-bool RtpDumpImpl::RTCP(const WebRtc_UWord8* packet) const
+bool RtpDumpImpl::RTCP(const uint8_t* packet) const
{
- const WebRtc_UWord8 payloadType = packet[1];
+ const uint8_t payloadType = packet[1];
bool is_rtcp = false;
switch(payloadType)
@@ -234,7 +233,7 @@
}
// TODO (hellner): why is TickUtil not used here?
-inline WebRtc_UWord32 RtpDumpImpl::GetTimeInMS() const
+inline uint32_t RtpDumpImpl::GetTimeInMS() const
{
#if defined(_WIN32)
return timeGetTime();
@@ -253,7 +252,7 @@
#endif
}
-inline WebRtc_UWord32 RtpDumpImpl::RtpDumpHtonl(WebRtc_UWord32 x) const
+inline uint32_t RtpDumpImpl::RtpDumpHtonl(uint32_t x) const
{
#if defined(WEBRTC_BIG_ENDIAN)
return x;
@@ -267,7 +266,7 @@
#endif
}
-inline WebRtc_UWord16 RtpDumpImpl::RtpDumpHtons(WebRtc_UWord16 x) const
+inline uint16_t RtpDumpImpl::RtpDumpHtons(uint16_t x) const
{
#if defined(WEBRTC_BIG_ENDIAN)
return x;
diff --git a/webrtc/modules/utility/source/rtp_dump_impl.h b/webrtc/modules/utility/source/rtp_dump_impl.h
index 9715c35..bb72dff 100644
--- a/webrtc/modules/utility/source/rtp_dump_impl.h
+++ b/webrtc/modules/utility/source/rtp_dump_impl.h
@@ -22,28 +22,27 @@
RtpDumpImpl();
virtual ~RtpDumpImpl();
- virtual WebRtc_Word32 Start(const char* fileNameUTF8);
- virtual WebRtc_Word32 Stop();
+ virtual int32_t Start(const char* fileNameUTF8);
+ virtual int32_t Stop();
virtual bool IsActive() const;
- virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
- WebRtc_UWord16 packetLength);
+ virtual int32_t DumpPacket(const uint8_t* packet, uint16_t packetLength);
private:
// Return the system time in ms.
- inline WebRtc_UWord32 GetTimeInMS() const;
+ inline uint32_t GetTimeInMS() const;
// Return x in network byte order (big endian).
- inline WebRtc_UWord32 RtpDumpHtonl(WebRtc_UWord32 x) const;
+ inline uint32_t RtpDumpHtonl(uint32_t x) const;
// Return x in network byte order (big endian).
- inline WebRtc_UWord16 RtpDumpHtons(WebRtc_UWord16 x) const;
+ inline uint16_t RtpDumpHtons(uint16_t x) const;
// Return true if the packet starts with a valid RTCP header.
// Note: See ModuleRTPUtility::RTPHeaderParser::RTCP() for details on how
// to determine if the packet is an RTCP packet.
- bool RTCP(const WebRtc_UWord8* packet) const;
+ bool RTCP(const uint8_t* packet) const;
private:
CriticalSectionWrapper* _critSect;
FileWrapper& _file;
- WebRtc_UWord32 _startTime;
+ uint32_t _startTime;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
diff --git a/webrtc/modules/utility/source/video_coder.cc b/webrtc/modules/utility/source/video_coder.cc
index cc33161..01e4ae0 100644
--- a/webrtc/modules/utility/source/video_coder.cc
+++ b/webrtc/modules/utility/source/video_coder.cc
@@ -13,7 +13,7 @@
#include "video_coder.h"
namespace webrtc {
-VideoCoder::VideoCoder(WebRtc_UWord32 instanceID)
+VideoCoder::VideoCoder(uint32_t instanceID)
: _vcm(VideoCodingModule::Create(instanceID)),
_decodedVideo(0)
{
@@ -29,7 +29,7 @@
VideoCodingModule::Destroy(_vcm);
}
-WebRtc_Word32 VideoCoder::ResetDecoder()
+int32_t VideoCoder::ResetDecoder()
{
_vcm->ResetDecoder();
@@ -41,9 +41,9 @@
return 0;
}
-WebRtc_Word32 VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
- WebRtc_UWord32 numberOfCores,
- WebRtc_UWord32 maxPayloadSize)
+int32_t VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize)
{
if(_vcm->RegisterSendCodec(&videoCodecInst, numberOfCores,
maxPayloadSize) != VCM_OK)
@@ -54,12 +54,12 @@
}
-WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
- WebRtc_Word32 numberOfCores)
+int32_t VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
+ int32_t numberOfCores)
{
if (videoCodecInst.plType == 0)
{
- WebRtc_Word8 plType = DefaultPayloadType(videoCodecInst.plName);
+ int8_t plType = DefaultPayloadType(videoCodecInst.plName);
if (plType == -1)
{
return -1;
@@ -74,8 +74,8 @@
return 0;
}
-WebRtc_Word32 VideoCoder::Decode(I420VideoFrame& decodedVideo,
- const EncodedVideoData& encodedData)
+int32_t VideoCoder::Decode(I420VideoFrame& decodedVideo,
+ const EncodedVideoData& encodedData)
{
decodedVideo.ResetSize();
if(encodedData.payloadSize <= 0)
@@ -92,8 +92,8 @@
}
-WebRtc_Word32 VideoCoder::Encode(const I420VideoFrame& videoFrame,
- EncodedVideoData& videoEncodedData)
+int32_t VideoCoder::Encode(const I420VideoFrame& videoFrame,
+ EncodedVideoData& videoEncodedData)
{
// The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
// pointer to videoFrame so that it can be updated.
@@ -106,11 +106,11 @@
return 0;
}
-WebRtc_Word8 VideoCoder::DefaultPayloadType(const char* plName)
+int8_t VideoCoder::DefaultPayloadType(const char* plName)
{
VideoCodec tmpCodec;
- WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
- for (WebRtc_UWord8 i = 0; i < numberOfCodecs; i++)
+ int32_t numberOfCodecs = _vcm->NumberOfCodecs();
+ for (uint8_t i = 0; i < numberOfCodecs; i++)
{
_vcm->Codec(i, &tmpCodec);
if(strncmp(tmpCodec.plName, plName, kPayloadNameSize) == 0)
@@ -121,18 +121,18 @@
return -1;
}
-WebRtc_Word32 VideoCoder::FrameToRender(I420VideoFrame& videoFrame)
+int32_t VideoCoder::FrameToRender(I420VideoFrame& videoFrame)
{
return _decodedVideo->CopyFrame(videoFrame);
}
-WebRtc_Word32 VideoCoder::SendData(
+int32_t VideoCoder::SendData(
const FrameType frameType,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
int64_t capture_time_ms,
- const WebRtc_UWord8* payloadData,
- WebRtc_UWord32 payloadSize,
+ const uint8_t* payloadData,
+ uint32_t payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* /*rtpVideoHdr*/)
{
@@ -144,7 +144,7 @@
_videoEncodedData->timeStamp = timeStamp;
_videoEncodedData->fragmentationHeader.CopyFrom(fragmentationHeader);
memcpy(_videoEncodedData->payloadData, payloadData,
- sizeof(WebRtc_UWord8) * payloadSize);
+ sizeof(uint8_t) * payloadSize);
_videoEncodedData->payloadSize = payloadSize;
return 0;
}
diff --git a/webrtc/modules/utility/source/video_coder.h b/webrtc/modules/utility/source/video_coder.h
index b1d8c7d..c69c3e4 100644
--- a/webrtc/modules/utility/source/video_coder.h
+++ b/webrtc/modules/utility/source/video_coder.h
@@ -20,43 +20,42 @@
class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
{
public:
- VideoCoder(WebRtc_UWord32 instanceID);
+ VideoCoder(uint32_t instanceID);
~VideoCoder();
- WebRtc_Word32 ResetDecoder();
+ int32_t ResetDecoder();
- WebRtc_Word32 SetEncodeCodec(VideoCodec& videoCodecInst,
- WebRtc_UWord32 numberOfCores,
- WebRtc_UWord32 maxPayloadSize);
+ int32_t SetEncodeCodec(VideoCodec& videoCodecInst,
+ uint32_t numberOfCores,
+ uint32_t maxPayloadSize);
// Select the codec that should be used for decoding. videoCodecInst.plType
// will be set to the codec's default payload type.
- WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
- WebRtc_Word32 numberOfCores);
+ int32_t SetDecodeCodec(VideoCodec& videoCodecInst, int32_t numberOfCores);
- WebRtc_Word32 Decode(I420VideoFrame& decodedVideo,
- const EncodedVideoData& encodedData);
+ int32_t Decode(I420VideoFrame& decodedVideo,
+ const EncodedVideoData& encodedData);
- WebRtc_Word32 Encode(const I420VideoFrame& videoFrame,
- EncodedVideoData& videoEncodedData);
+ int32_t Encode(const I420VideoFrame& videoFrame,
+ EncodedVideoData& videoEncodedData);
- WebRtc_Word8 DefaultPayloadType(const char* plName);
+ int8_t DefaultPayloadType(const char* plName);
private:
// VCMReceiveCallback function.
// Note: called by VideoCodingModule when decoding finished.
- WebRtc_Word32 FrameToRender(I420VideoFrame& videoFrame);
+ int32_t FrameToRender(I420VideoFrame& videoFrame);
// VCMPacketizationCallback function.
// Note: called by VideoCodingModule when encoding finished.
- WebRtc_Word32 SendData(
+ int32_t SendData(
FrameType /*frameType*/,
- WebRtc_UWord8 /*payloadType*/,
- WebRtc_UWord32 /*timeStamp*/,
+ uint8_t /*payloadType*/,
+ uint32_t /*timeStamp*/,
int64_t capture_time_ms,
- const WebRtc_UWord8* payloadData,
- WebRtc_UWord32 payloadSize,
+ const uint8_t* payloadData,
+ uint32_t payloadSize,
const RTPFragmentationHeader& /* fragmentationHeader*/,
const RTPVideoHeader* rtpTypeHdr);
diff --git a/webrtc/modules/utility/source/video_frames_queue.cc b/webrtc/modules/utility/source/video_frames_queue.cc
index 5a1ea59..039c9e8 100644
--- a/webrtc/modules/utility/source/video_frames_queue.cc
+++ b/webrtc/modules/utility/source/video_frames_queue.cc
@@ -47,7 +47,7 @@
}
}
-WebRtc_Word32 VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
+int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
I420VideoFrame* ptrFrameToAdd = NULL;
// Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
if (!_emptyFrames.Empty()) {
@@ -112,7 +112,7 @@
return ptrRenderFrame;
}
-WebRtc_Word32 VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
+int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
ptrOldFrame->set_timestamp(0);
ptrOldFrame->set_width(0);
ptrOldFrame->set_height(0);
@@ -122,7 +122,7 @@
return 0;
}
-WebRtc_Word32 VideoFramesQueue::SetRenderDelay(WebRtc_UWord32 renderDelay) {
+int32_t VideoFramesQueue::SetRenderDelay(uint32_t renderDelay) {
_renderDelayMs = renderDelay;
return 0;
}
diff --git a/webrtc/modules/utility/source/video_frames_queue.h b/webrtc/modules/utility/source/video_frames_queue.h
index 3f63f65..af6821e 100644
--- a/webrtc/modules/utility/source/video_frames_queue.h
+++ b/webrtc/modules/utility/source/video_frames_queue.h
@@ -26,7 +26,7 @@
~VideoFramesQueue();
// Put newFrame (last) in the queue.
- WebRtc_Word32 AddFrame(const I420VideoFrame& newFrame);
+ int32_t AddFrame(const I420VideoFrame& newFrame);
// Return the most current frame. I.e. the frame with the highest
// VideoFrame::RenderTimeMs() that is lower than
@@ -34,12 +34,12 @@
I420VideoFrame* FrameToRecord();
// Set the render delay estimate to renderDelay ms.
- WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay);
+ int32_t SetRenderDelay(uint32_t renderDelay);
protected:
// Make ptrOldFrame available for re-use. I.e. put it in the empty frames
// queue.
- WebRtc_Word32 ReturnFrame(I420VideoFrame* ptrOldFrame);
+ int32_t ReturnFrame(I420VideoFrame* ptrOldFrame);
private:
// Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
@@ -54,7 +54,7 @@
ListWrapper _emptyFrames;
// Estimated render delay.
- WebRtc_UWord32 _renderDelayMs;
+ uint32_t _renderDelayMs;
};
} // namespace webrtc
#endif // WEBRTC_MODULE_UTILITY_VIDEO