Formatting ACM tests
Pure formatting of all files located in /webrtc/modules/audio_coding/main/test/
Minor manual modifications were made after running the Eclipse formatting tool, such as wrapping long lines (mostly comments).
BUG=issue1024
Review URL: https://webrtc-codereview.appspot.com/1342004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@3946 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/modules/audio_coding/main/test/Channel.cc b/webrtc/modules/audio_coding/main/test/Channel.cc
index c06e452..2e097f7 100644
--- a/webrtc/modules/audio_coding/main/test/Channel.cc
+++ b/webrtc/modules/audio_coding/main/test/Channel.cc
@@ -19,459 +19,374 @@
namespace webrtc {
-int32_t
-Channel::SendData(
- const FrameType frameType,
- const uint8_t payloadType,
- const uint32_t timeStamp,
- const uint8_t* payloadData,
- const uint16_t payloadSize,
- const RTPFragmentationHeader* fragmentation)
-{
- WebRtcRTPHeader rtpInfo;
- int32_t status;
- uint16_t payloadDataSize = payloadSize;
+int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
+ const uint32_t timeStamp, const uint8_t* payloadData,
+ const uint16_t payloadSize,
+ const RTPFragmentationHeader* fragmentation) {
+ WebRtcRTPHeader rtpInfo;
+ int32_t status;
+ uint16_t payloadDataSize = payloadSize;
- rtpInfo.header.markerBit = false;
- rtpInfo.header.ssrc = 0;
- rtpInfo.header.sequenceNumber = _seqNo++;
- rtpInfo.header.payloadType = payloadType;
- rtpInfo.header.timestamp = timeStamp;
- if(frameType == kAudioFrameCN)
- {
- rtpInfo.type.Audio.isCNG = true;
- }
- else
- {
- rtpInfo.type.Audio.isCNG = false;
- }
- if(frameType == kFrameEmpty)
- {
- // Skip this frame
- return 0;
- }
-
- rtpInfo.type.Audio.channel = 1;
- // Treat fragmentation separately
- if(fragmentation != NULL)
- {
- if((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) && // silence for too long send only new data
- (fragmentation->fragmentationVectorSize == 2))
- {
- // only 0x80 if we have multiple blocks
- _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
- uint32_t REDheader = (((uint32_t)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
- _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
- _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
- _payloadData[3] = uint8_t(REDheader & 0x000000FF);
-
- _payloadData[4] = fragmentation->fragmentationPlType[0];
- // copy the RED data
- memcpy(_payloadData + 5,
- payloadData + fragmentation->fragmentationOffset[1],
- fragmentation->fragmentationLength[1]);
- // copy the normal data
- memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
- payloadDataSize += 5;
- } else
- {
- // single block (newest one)
- memcpy(_payloadData,
- payloadData + fragmentation->fragmentationOffset[0],
- fragmentation->fragmentationLength[0]);
- payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
- rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
- }
- }
- else
- {
- memcpy(_payloadData, payloadData, payloadDataSize);
- if(_isStereo)
- {
- if(_leftChannel)
- {
- memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
- _leftChannel = false;
- rtpInfo.type.Audio.channel = 1;
- }
- else
- {
- memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
- _leftChannel = true;
- rtpInfo.type.Audio.channel = 2;
- }
- }
- }
-
- _channelCritSect->Enter();
- if(_saveBitStream)
- {
- //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
- }
-
- if(!_isStereo)
- {
- CalcStatistics(rtpInfo, payloadSize);
- }
- _lastInTimestamp = timeStamp;
- _totalBytes += payloadDataSize;
- _channelCritSect->Leave();
-
- if(_useFECTestWithPacketLoss)
- {
- _packetLoss += 1;
- if(_packetLoss == 3)
- {
- _packetLoss = 0;
- return 0;
- }
- }
-
- status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize,
- rtpInfo);
-
- return status;
-}
-
-void
-Channel::CalcStatistics(
- WebRtcRTPHeader& rtpInfo,
- uint16_t payloadSize)
-{
- int n;
- if((rtpInfo.header.payloadType != _lastPayloadType) &&
- (_lastPayloadType != -1))
- {
- // payload-type is changed.
- // we have to terminate the calculations on the previous payload type
- // we ignore the last packet in that payload type just to make things
- // easier.
- for(n = 0; n < MAX_NUM_PAYLOADS; n++)
- {
- if(_lastPayloadType == _payloadStats[n].payloadType)
- {
- _payloadStats[n].newPacket = true;
- break;
- }
- }
- }
- _lastPayloadType = rtpInfo.header.payloadType;
-
- bool newPayload = true;
- ACMTestPayloadStats* currentPayloadStr = NULL;
- for(n = 0; n < MAX_NUM_PAYLOADS; n++)
- {
- if(rtpInfo.header.payloadType == _payloadStats[n].payloadType)
- {
- newPayload = false;
- currentPayloadStr = &_payloadStats[n];
- break;
- }
- }
-
- if(!newPayload)
- {
- if(!currentPayloadStr->newPacket)
- {
- uint32_t lastFrameSizeSample = (uint32_t)((uint32_t)rtpInfo.header.timestamp -
- (uint32_t)currentPayloadStr->lastTimestamp);
- assert(lastFrameSizeSample > 0);
- int k = 0;
- while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
- lastFrameSizeSample) &&
- (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
- {
- k++;
- }
- ACMTestFrameSizeStats* currentFrameSizeStats =
- &(currentPayloadStr->frameSizeStats[k]);
- currentFrameSizeStats->frameSizeSample = (int16_t)lastFrameSizeSample;
-
- // increment the number of encoded samples.
- currentFrameSizeStats->totalEncodedSamples +=
- lastFrameSizeSample;
- // increment the number of recveived packets
- currentFrameSizeStats->numPackets++;
- // increment the total number of bytes (this is based on
- // the previous payload we don't know the frame-size of
- // the current payload.
- currentFrameSizeStats->totalPayloadLenByte +=
- currentPayloadStr->lastPayloadLenByte;
- // store the maximum payload-size (this is based on
- // the previous payload we don't know the frame-size of
- // the current payload.
- if(currentFrameSizeStats->maxPayloadLen <
- currentPayloadStr->lastPayloadLenByte)
- {
- currentFrameSizeStats->maxPayloadLen =
- currentPayloadStr->lastPayloadLenByte;
- }
- // store the current values for the next time
- currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
- currentPayloadStr->lastPayloadLenByte = payloadSize;
- }
- else
- {
- currentPayloadStr->newPacket = false;
- currentPayloadStr->lastPayloadLenByte = payloadSize;
- currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
- currentPayloadStr->payloadType = rtpInfo.header.payloadType;
- }
- }
- else
- {
- n = 0;
- while(_payloadStats[n].payloadType != -1)
- {
- n++;
- }
- // first packet
- _payloadStats[n].newPacket = false;
- _payloadStats[n].lastPayloadLenByte = payloadSize;
- _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
- _payloadStats[n].payloadType = rtpInfo.header.payloadType;
- }
-}
-
-Channel::Channel(int16_t chID) :
-_receiverACM(NULL),
-_seqNo(0),
-_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
-_bitStreamFile(NULL),
-_saveBitStream(false),
-_lastPayloadType(-1),
-_isStereo(false),
-_leftChannel(true),
-_lastInTimestamp(0),
-_packetLoss(0),
-_useFECTestWithPacketLoss(false),
-_beginTime(TickTime::MillisecondTimestamp()),
-_totalBytes(0)
-{
- int n;
- int k;
- for(n = 0; n < MAX_NUM_PAYLOADS; n++)
- {
- _payloadStats[n].payloadType = -1;
- _payloadStats[n].newPacket = true;
- for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
- {
- _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
- _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
- _payloadStats[n].frameSizeStats[k].numPackets = 0;
- _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
- _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
- }
- }
- if(chID >= 0)
- {
- _saveBitStream = true;
- char bitStreamFileName[500];
- sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
- _bitStreamFile = fopen(bitStreamFileName, "wb");
- }
- else
- {
- _saveBitStream = false;
- }
-}
-
-Channel::~Channel()
-{
- delete _channelCritSect;
-}
-
-void
-Channel::RegisterReceiverACM(AudioCodingModule* acm)
-{
- _receiverACM = acm;
- return;
-}
-
-void
-Channel::ResetStats()
-{
- int n;
- int k;
- _channelCritSect->Enter();
- _lastPayloadType = -1;
- for(n = 0; n < MAX_NUM_PAYLOADS; n++)
- {
- _payloadStats[n].payloadType = -1;
- _payloadStats[n].newPacket = true;
- for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
- {
- _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
- _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
- _payloadStats[n].frameSizeStats[k].numPackets = 0;
- _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
- _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
- }
- }
- _beginTime = TickTime::MillisecondTimestamp();
- _totalBytes = 0;
- _channelCritSect->Leave();
-}
-
-int16_t
-Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
-{
- _channelCritSect->Enter();
- int n;
- payloadStats.payloadType = -1;
- for(n = 0; n < MAX_NUM_PAYLOADS; n++)
- {
- if(_payloadStats[n].payloadType == codecInst.pltype)
- {
- memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
- break;
- }
- }
- if(payloadStats.payloadType == -1)
- {
- _channelCritSect->Leave();
- return -1;
- }
- for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
- {
- if(payloadStats.frameSizeStats[n].frameSizeSample == 0)
- {
- _channelCritSect->Leave();
- return 0;
- }
- payloadStats.frameSizeStats[n].usageLenSec =
- (double)payloadStats.frameSizeStats[n].totalEncodedSamples
- / (double)codecInst.plfreq;
-
- payloadStats.frameSizeStats[n].rateBitPerSec =
- payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 /
- payloadStats.frameSizeStats[n].usageLenSec;
-
- }
- _channelCritSect->Leave();
+ rtpInfo.header.markerBit = false;
+ rtpInfo.header.ssrc = 0;
+ rtpInfo.header.sequenceNumber = _seqNo++;
+ rtpInfo.header.payloadType = payloadType;
+ rtpInfo.header.timestamp = timeStamp;
+ if (frameType == kAudioFrameCN) {
+ rtpInfo.type.Audio.isCNG = true;
+ } else {
+ rtpInfo.type.Audio.isCNG = false;
+ }
+ if (frameType == kFrameEmpty) {
+ // Skip this frame
return 0;
-}
+ }
-void
-Channel::Stats(uint32_t* numPackets)
-{
- _channelCritSect->Enter();
- int k;
- int n;
- memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
- for(k = 0; k < MAX_NUM_PAYLOADS; k++)
- {
- if(_payloadStats[k].payloadType == -1)
- {
- break;
- }
- numPackets[k] = 0;
- for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
- {
- if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
- {
- break;
- }
- numPackets[k] +=
- _payloadStats[k].frameSizeStats[n].numPackets;
- }
+ rtpInfo.type.Audio.channel = 1;
+ // Treat fragmentation separately
+ if (fragmentation != NULL) {
+ // If silence for too long, send only new data.
+ if ((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) &&
+ (fragmentation->fragmentationVectorSize == 2)) {
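+      // Build a RED packet (RFC 2198): a 4-byte redundant-block header with
+      // the F bit set, the block payload type, a 14-bit timestamp offset and
+      // a 10-bit block length, followed by a 1-byte primary-block header.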
+ // only 0x80 if we have multiple blocks
+ _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
+ uint32_t REDheader = (((uint32_t) fragmentation->fragmentationTimeDiff[1])
+ << 10) + fragmentation->fragmentationLength[1];
+ _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
+ _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
+ _payloadData[3] = uint8_t(REDheader & 0x000000FF);
+
+ _payloadData[4] = fragmentation->fragmentationPlType[0];
+ // copy the RED data
+ memcpy(_payloadData + 5,
+ payloadData + fragmentation->fragmentationOffset[1],
+ fragmentation->fragmentationLength[1]);
+ // copy the normal data
+ memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
+ payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+ payloadDataSize += 5;
+ } else {
+ // single block (newest one)
+ memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
+ fragmentation->fragmentationLength[0]);
+ payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
+ rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
}
- _channelCritSect->Leave();
-}
-
-void
-Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte)
-{
- _channelCritSect->Enter();
-
- int k;
- int n;
- memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
- for(k = 0; k < MAX_NUM_PAYLOADS; k++)
- {
- if(_payloadStats[k].payloadType == -1)
- {
- break;
- }
- payloadType[k] = (uint8_t)_payloadStats[k].payloadType;
- payloadLenByte[k] = 0;
- for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
- {
- if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
- {
- break;
- }
- payloadLenByte[k] += (uint16_t)
- _payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
- }
+ } else {
+ memcpy(_payloadData, payloadData, payloadDataSize);
+ if (_isStereo) {
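+      // For stereo, cache the left channel's RTP header and reuse it for the
+      // right channel so both share the same sequence number and timestamp.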
+ if (_leftChannel) {
+ memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
+ _leftChannel = false;
+ rtpInfo.type.Audio.channel = 1;
+ } else {
+ memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
+ _leftChannel = true;
+ rtpInfo.type.Audio.channel = 2;
+ }
}
+ }
- _channelCritSect->Leave();
-}
+ _channelCritSect->Enter();
+ if (_saveBitStream) {
+ //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
+ }
+ if (!_isStereo) {
+ CalcStatistics(rtpInfo, payloadSize);
+ }
+ _lastInTimestamp = timeStamp;
+ _totalBytes += payloadDataSize;
+ _channelCritSect->Leave();
-void
-Channel::PrintStats(CodecInst& codecInst)
-{
- ACMTestPayloadStats payloadStats;
- Stats(codecInst, payloadStats);
- printf("%s %d kHz\n",
- codecInst.plname,
- codecInst.plfreq / 1000);
- printf("=====================================================\n");
- if(payloadStats.payloadType == -1)
- {
- printf("No Packets are sent with payload-type %d (%s)\n\n",
- codecInst.pltype,
- codecInst.plname);
- return;
+ if (_useFECTestWithPacketLoss) {
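+    // Simulate packet loss for the FEC test by dropping every third packet.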
+ _packetLoss += 1;
+ if (_packetLoss == 3) {
+ _packetLoss = 0;
+ return 0;
}
- for(int k = 0; k < MAX_NUM_FRAMESIZES; k++)
- {
- if(payloadStats.frameSizeStats[k].frameSizeSample == 0)
- {
- break;
- }
- printf("Frame-size.................... %d samples\n",
- payloadStats.frameSizeStats[k].frameSizeSample);
- printf("Average Rate.................. %.0f bits/sec\n",
- payloadStats.frameSizeStats[k].rateBitPerSec);
- printf("Maximum Payload-Size.......... %d Bytes\n",
- payloadStats.frameSizeStats[k].maxPayloadLen);
- printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
- ((double)payloadStats.frameSizeStats[k].maxPayloadLen * 8.0 *
- (double)codecInst.plfreq) /
- (double)payloadStats.frameSizeStats[k].frameSizeSample);
- printf("Number of Packets............. %u\n",
- (unsigned int)payloadStats.frameSizeStats[k].numPackets);
- printf("Duration...................... %0.3f sec\n\n",
- payloadStats.frameSizeStats[k].usageLenSec);
+ }
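+  // Deliver the packet directly to the receiving-side ACM; this test channel
+  // does not go through a real transport.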
+ status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);
+
+ return status;
+}
+
+void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize) {
+ int n;
+ if ((rtpInfo.header.payloadType != _lastPayloadType)
+ && (_lastPayloadType != -1)) {
+    // The payload type has changed. We have to terminate the calculations
+    // on the previous payload type; we ignore the last packet of that
+    // payload type just to make things easier.
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (_lastPayloadType == _payloadStats[n].payloadType) {
+ _payloadStats[n].newPacket = true;
+ break;
+ }
}
+ }
+ _lastPayloadType = rtpInfo.header.payloadType;
+ bool newPayload = true;
+ ACMTestPayloadStats* currentPayloadStr = NULL;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
+ newPayload = false;
+ currentPayloadStr = &_payloadStats[n];
+ break;
+ }
+ }
+
+ if (!newPayload) {
+ if (!currentPayloadStr->newPacket) {
+ uint32_t lastFrameSizeSample = (uint32_t)(
+ (uint32_t) rtpInfo.header.timestamp
+ - (uint32_t) currentPayloadStr->lastTimestamp);
+ assert(lastFrameSizeSample > 0);
+ int k = 0;
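+      // Find the stats entry for this frame size, or the first unused slot.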
+ while ((currentPayloadStr->frameSizeStats[k].frameSizeSample
+ != lastFrameSizeSample)
+ && (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0)) {
+ k++;
+ }
+      ACMTestFrameSizeStats* currentFrameSizeStats =
+          &(currentPayloadStr->frameSizeStats[k]);
+ currentFrameSizeStats->frameSizeSample = (int16_t) lastFrameSizeSample;
+
+ // increment the number of encoded samples.
+ currentFrameSizeStats->totalEncodedSamples += lastFrameSizeSample;
+      // Increment the number of received packets.
+ currentFrameSizeStats->numPackets++;
+      // Increment the total number of bytes (this is based on the previous
+      // payload since we don't know the frame size of the current payload).
+      currentFrameSizeStats->totalPayloadLenByte +=
+          currentPayloadStr->lastPayloadLenByte;
+      // Store the maximum payload size (this is based on the previous
+      // payload since we don't know the frame size of the current payload).
+      if (currentFrameSizeStats->maxPayloadLen
+          < currentPayloadStr->lastPayloadLenByte) {
+        currentFrameSizeStats->maxPayloadLen =
+            currentPayloadStr->lastPayloadLenByte;
+ }
+ // store the current values for the next time
+ currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+ currentPayloadStr->lastPayloadLenByte = payloadSize;
+ } else {
+ currentPayloadStr->newPacket = false;
+ currentPayloadStr->lastPayloadLenByte = payloadSize;
+ currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+ currentPayloadStr->payloadType = rtpInfo.header.payloadType;
+ }
+ } else {
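+    // New payload type: find the first unused stats slot.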
+ n = 0;
+ while (_payloadStats[n].payloadType != -1) {
+ n++;
+ }
+ // first packet
+ _payloadStats[n].newPacket = false;
+ _payloadStats[n].lastPayloadLenByte = payloadSize;
+ _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
+ _payloadStats[n].payloadType = rtpInfo.header.payloadType;
+ }
}
-uint32_t
-Channel::LastInTimestamp()
-{
- uint32_t timestamp;
- _channelCritSect->Enter();
- timestamp = _lastInTimestamp;
+Channel::Channel(int16_t chID)
+ : _receiverACM(NULL),
+ _seqNo(0),
+ _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+ _bitStreamFile(NULL),
+ _saveBitStream(false),
+ _lastPayloadType(-1),
+ _isStereo(false),
+ _leftChannel(true),
+ _lastInTimestamp(0),
+ _packetLoss(0),
+ _useFECTestWithPacketLoss(false),
+ _beginTime(TickTime::MillisecondTimestamp()),
+ _totalBytes(0) {
+ int n;
+ int k;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ _payloadStats[n].payloadType = -1;
+ _payloadStats[n].newPacket = true;
+ for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+ _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
+ _payloadStats[n].frameSizeStats[k].numPackets = 0;
+ _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+ _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+ }
+ }
+ if (chID >= 0) {
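+    // A non-negative channel ID opens a per-channel file for saving the sent
+    // bit stream.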
+ _saveBitStream = true;
+ char bitStreamFileName[500];
+ sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
+ _bitStreamFile = fopen(bitStreamFileName, "wb");
+ } else {
+ _saveBitStream = false;
+ }
+}
+
+Channel::~Channel() {
+ delete _channelCritSect;
+}
+
+void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
+ _receiverACM = acm;
+ return;
+}
+
+void Channel::ResetStats() {
+ int n;
+ int k;
+ _channelCritSect->Enter();
+ _lastPayloadType = -1;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ _payloadStats[n].payloadType = -1;
+ _payloadStats[n].newPacket = true;
+ for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+ _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
+ _payloadStats[n].frameSizeStats[k].numPackets = 0;
+ _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+ _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+ }
+ }
+ _beginTime = TickTime::MillisecondTimestamp();
+ _totalBytes = 0;
+ _channelCritSect->Leave();
+}
+
+int16_t Channel::Stats(CodecInst& codecInst,
+ ACMTestPayloadStats& payloadStats) {
+ _channelCritSect->Enter();
+ int n;
+ payloadStats.payloadType = -1;
+ for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
+ if (_payloadStats[n].payloadType == codecInst.pltype) {
+ memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
+ break;
+ }
+ }
+ if (payloadStats.payloadType == -1) {
_channelCritSect->Leave();
- return timestamp;
+ return -1;
+ }
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
+ _channelCritSect->Leave();
+ return 0;
+ }
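+    // Total encoded samples divided by the sample rate gives the encoded
+    // duration in seconds; payload bytes * 8 over that duration gives the
+    // average rate in bits/sec.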
+    payloadStats.frameSizeStats[n].usageLenSec =
+        (double) payloadStats.frameSizeStats[n].totalEncodedSamples
+        / (double) codecInst.plfreq;
+
+ payloadStats.frameSizeStats[n].rateBitPerSec =
+ payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
+ / payloadStats.frameSizeStats[n].usageLenSec;
+  }
+ _channelCritSect->Leave();
+ return 0;
}
-double
-Channel::BitRate()
-{
- double rate;
- uint64_t currTime = TickTime::MillisecondTimestamp();
- _channelCritSect->Enter();
- rate = ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
- _channelCritSect->Leave();
- return rate;
+void Channel::Stats(uint32_t* numPackets) {
+ _channelCritSect->Enter();
+ int k;
+ int n;
+ memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
+ for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
+ if (_payloadStats[k].payloadType == -1) {
+ break;
+ }
+ numPackets[k] = 0;
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
+ break;
+ }
+ numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
+ }
+ }
+ _channelCritSect->Leave();
}
-} // namespace webrtc
+void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
+ _channelCritSect->Enter();
+
+ int k;
+ int n;
+ memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
+ for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
+ if (_payloadStats[k].payloadType == -1) {
+ break;
+ }
+ payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
+ payloadLenByte[k] = 0;
+ for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
+ if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
+ break;
+ }
+      payloadLenByte[k] +=
+          (uint16_t) _payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
+ }
+ }
+
+ _channelCritSect->Leave();
+}
+
+void Channel::PrintStats(CodecInst& codecInst) {
+ ACMTestPayloadStats payloadStats;
+ Stats(codecInst, payloadStats);
+ printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
+ printf("=====================================================\n");
+ if (payloadStats.payloadType == -1) {
+ printf("No Packets are sent with payload-type %d (%s)\n\n",
+ codecInst.pltype, codecInst.plname);
+ return;
+ }
+ for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
+ if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
+ break;
+ }
+ printf("Frame-size.................... %d samples\n",
+ payloadStats.frameSizeStats[k].frameSizeSample);
+ printf("Average Rate.................. %.0f bits/sec\n",
+ payloadStats.frameSizeStats[k].rateBitPerSec);
+ printf("Maximum Payload-Size.......... %d Bytes\n",
+ payloadStats.frameSizeStats[k].maxPayloadLen);
+ printf(
+ "Maximum Instantaneous Rate.... %.0f bits/sec\n",
+ ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
+ * (double) codecInst.plfreq)
+ / (double) payloadStats.frameSizeStats[k].frameSizeSample);
+ printf("Number of Packets............. %u\n",
+ (unsigned int) payloadStats.frameSizeStats[k].numPackets);
+ printf("Duration...................... %0.3f sec\n\n",
+ payloadStats.frameSizeStats[k].usageLenSec);
+  }
+}
+
+uint32_t Channel::LastInTimestamp() {
+ uint32_t timestamp;
+ _channelCritSect->Enter();
+ timestamp = _lastInTimestamp;
+ _channelCritSect->Leave();
+ return timestamp;
+}
+
+double Channel::BitRate() {
+ double rate;
+ uint64_t currTime = TickTime::MillisecondTimestamp();
+ _channelCritSect->Enter();
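+  // The elapsed time is in milliseconds, so the result is in bits per
+  // millisecond, i.e. kbit/s.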
+ rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
+ _channelCritSect->Leave();
+ return rate;
+}
+
+} // namespace webrtc