/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/main/test/Channel.h"

#include <assert.h>
#include <stdio.h>
#include <string.h>

#include <iostream>

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"

namespace webrtc {

int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
                          const uint32_t timeStamp, const uint8_t* payloadData,
                          const uint16_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WebRtcRTPHeader rtpInfo;
  int32_t status;
  uint16_t payloadDataSize = payloadSize;

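  // Build the RTP header for this packet. The sequence number and the send
  // timestamp can be overridden externally (external_sequence_number_,
  // external_send_timestamp_) so tests can inject specific values.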
  rtpInfo.header.markerBit = false;
  rtpInfo.header.ssrc = 0;
  rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
      _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
  rtpInfo.header.payloadType = payloadType;
  rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
      static_cast<uint32_t>(external_send_timestamp_);

  rtpInfo.type.Audio.isCNG = (frameType == kAudioFrameCN);
  if (frameType == kFrameEmpty) {
    // Skip this frame.
    return 0;
  }

  rtpInfo.type.Audio.channel = 1;
  // Treat fragmentation separately.
  if (fragmentation != NULL) {
    // If the silence has lasted too long, send only the new data.
    if ((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) &&
        (fragmentation->fragmentationVectorSize == 2)) {
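      // Two blocks are available: pack them as a RED payload (RFC 2198). The
      // 4-byte header of the redundant block carries the F bit, the block
      // payload type, a 14-bit timestamp offset and a 10-bit block length;
      // the primary block is announced by a single payload-type byte.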
      // The leading F bit (0x80) is only set when there are multiple blocks.
      _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
      uint32_t REDheader = (((uint32_t) fragmentation->fragmentationTimeDiff[1])
          << 10) + fragmentation->fragmentationLength[1];
      _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
      _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
      _payloadData[3] = uint8_t(REDheader & 0x000000FF);

      _payloadData[4] = fragmentation->fragmentationPlType[0];
      // Copy the redundant (RED) data.
      memcpy(_payloadData + 5,
             payloadData + fragmentation->fragmentationOffset[1],
             fragmentation->fragmentationLength[1]);
      // Copy the primary data.
      memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
             payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize += 5;
    } else {
      // Single block (the newest one).
      memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
      rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
    }
  } else {
    memcpy(_payloadData, payloadData, payloadDataSize);
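    // In stereo mode, calls alternate between the left and right channel: the
    // left-channel RTP header is cached in _rtpInfo and reused for the
    // following right-channel call, so both halves of a frame share the same
    // timestamp and sequence number.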
    if (_isStereo) {
      if (_leftChannel) {
        memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = false;
        rtpInfo.type.Audio.channel = 1;
      } else {
        memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = true;
        rtpInfo.type.Audio.channel = 2;
      }
    }
  }

  _channelCritSect->Enter();
  if (_saveBitStream) {
    // fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
  }

  if (!_isStereo) {
    CalcStatistics(rtpInfo, payloadSize);
  }
  _lastInTimestamp = timeStamp;
  _totalBytes += payloadDataSize;
  _channelCritSect->Leave();

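  // When simulating packet loss for the FEC test, drop every third packet.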
  if (_useFECTestWithPacketLoss) {
    _packetLoss += 1;
    if (_packetLoss == 3) {
      _packetLoss = 0;
      return 0;
    }
  }

  if (num_packets_to_drop_ > 0) {
    num_packets_to_drop_--;
    return 0;
  }

  status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);

  return status;
}

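// Gather per-payload-type and per-frame-size statistics for a packet. The
// frame size in samples is inferred from the timestamp difference between
// consecutive packets of the same payload type.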
void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize) {
  int n;
  if ((rtpInfo.header.payloadType != _lastPayloadType)
      && (_lastPayloadType != -1)) {
    // The payload type has changed. Terminate the calculations for the
    // previous payload type; to keep things simple, the last packet of that
    // payload type is ignored.
    for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
      if (_lastPayloadType == _payloadStats[n].payloadType) {
        _payloadStats[n].newPacket = true;
        break;
      }
    }
  }
  _lastPayloadType = rtpInfo.header.payloadType;

  bool newPayload = true;
  ACMTestPayloadStats* currentPayloadStr = NULL;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
      newPayload = false;
      currentPayloadStr = &_payloadStats[n];
      break;
    }
  }

  if (!newPayload) {
    if (!currentPayloadStr->newPacket) {
      uint32_t lastFrameSizeSample = (uint32_t)(
          (uint32_t) rtpInfo.header.timestamp
          - (uint32_t) currentPayloadStr->lastTimestamp);
      assert(lastFrameSizeSample > 0);
      int k = 0;
      while ((currentPayloadStr->frameSizeStats[k].frameSizeSample
          != lastFrameSizeSample)
          && (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0)) {
        k++;
      }
      ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
          ->frameSizeStats[k]);
      currentFrameSizeStats->frameSizeSample = (int16_t) lastFrameSizeSample;

      // Increment the number of encoded samples.
      currentFrameSizeStats->totalEncodedSamples += lastFrameSizeSample;
      // Increment the number of received packets.
      currentFrameSizeStats->numPackets++;
      // Increment the total number of bytes (this is based on the previous
      // payload, since we don't know the frame size of the current payload).
      currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
          ->lastPayloadLenByte;
      // Store the maximum payload size (this is based on the previous
      // payload, since we don't know the frame size of the current payload).
      if (currentFrameSizeStats->maxPayloadLen
          < currentPayloadStr->lastPayloadLenByte) {
        currentFrameSizeStats->maxPayloadLen = currentPayloadStr
            ->lastPayloadLenByte;
      }
      // Store the current values for the next call.
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
    } else {
      currentPayloadStr->newPacket = false;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->payloadType = rtpInfo.header.payloadType;
    }
  } else {
    // First packet of a new payload type: claim the next free statistics slot.
    n = 0;
    while (_payloadStats[n].payloadType != -1) {
      n++;
    }
    _payloadStats[n].newPacket = false;
    _payloadStats[n].lastPayloadLenByte = payloadSize;
    _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
    _payloadStats[n].payloadType = rtpInfo.header.payloadType;
  }
}

Channel::Channel(int16_t chID)
    : _receiverACM(NULL),
      _seqNo(0),
      _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _bitStreamFile(NULL),
      _saveBitStream(false),
      _lastPayloadType(-1),
      _isStereo(false),
      _leftChannel(true),
      _lastInTimestamp(0),
      _packetLoss(0),
      _useFECTestWithPacketLoss(false),
      _beginTime(TickTime::MillisecondTimestamp()),
      _totalBytes(0),
      external_send_timestamp_(-1),
      external_sequence_number_(-1),
      num_packets_to_drop_(0) {
  int n;
  int k;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
  if (chID >= 0) {
    _saveBitStream = true;
    char bitStreamFileName[500];
    sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
    _bitStreamFile = fopen(bitStreamFileName, "wb");
  } else {
    _saveBitStream = false;
  }
}

Channel::~Channel() {
  delete _channelCritSect;
}

void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
  _receiverACM = acm;
}

void Channel::ResetStats() {
  int n;
  int k;
  _channelCritSect->Enter();
  _lastPayloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
  _beginTime = TickTime::MillisecondTimestamp();
  _totalBytes = 0;
  _channelCritSect->Leave();
}

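// Copy the statistics gathered for codecInst's payload type into payloadStats.
// For each frame size seen, the usage time is the total number of encoded
// samples divided by the sample rate, and the average rate is the total
// payload length in bits divided by that time. Returns -1 if no packets with
// this payload type have been seen.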
int16_t Channel::Stats(CodecInst& codecInst,
                       ACMTestPayloadStats& payloadStats) {
  _channelCritSect->Enter();
  int n;
  payloadStats.payloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (_payloadStats[n].payloadType == codecInst.pltype) {
      memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
      break;
    }
  }
  if (payloadStats.payloadType == -1) {
    _channelCritSect->Leave();
    return -1;
  }
  for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
    if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
      _channelCritSect->Leave();
      return 0;
    }
    payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
        .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;

    payloadStats.frameSizeStats[n].rateBitPerSec =
        payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
        / payloadStats.frameSizeStats[n].usageLenSec;
  }
  _channelCritSect->Leave();
  return 0;
}

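// Report the total number of packets counted for each payload type.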
void Channel::Stats(uint32_t* numPackets) {
  _channelCritSect->Enter();
  int k;
  int n;
  memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    numPackets[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
    }
  }
  _channelCritSect->Leave();
}

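// Report, for each payload type, the payload type number and the total number
// of payload bytes counted for it.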
void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
  _channelCritSect->Enter();

  int k;
  int n;
  memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
    payloadLenByte[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
          .totalPayloadLenByte;
    }
  }

  _channelCritSect->Leave();
}

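// Print a per-frame-size summary of the statistics gathered for the given
// codec. The maximum instantaneous rate is derived from the largest payload
// seen: maxPayloadLen * 8 * plfreq / frameSizeSample.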
void Channel::PrintStats(CodecInst& codecInst) {
  ACMTestPayloadStats payloadStats;
  Stats(codecInst, payloadStats);
  printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
  printf("=====================================================\n");
  if (payloadStats.payloadType == -1) {
    printf("No packets were sent with payload-type %d (%s)\n\n",
           codecInst.pltype, codecInst.plname);
    return;
  }
  for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
    if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
      break;
    }
    printf("Frame-size.................... %d samples\n",
           payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Average Rate.................. %.0f bits/sec\n",
           payloadStats.frameSizeStats[k].rateBitPerSec);
    printf("Maximum Payload-Size.......... %d Bytes\n",
           payloadStats.frameSizeStats[k].maxPayloadLen);
    printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
           ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
               * (double) codecInst.plfreq)
               / (double) payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Number of Packets............. %u\n",
           (unsigned int) payloadStats.frameSizeStats[k].numPackets);
    printf("Duration...................... %0.3f sec\n\n",
           payloadStats.frameSizeStats[k].usageLenSec);
  }
}

uint32_t Channel::LastInTimestamp() {
  uint32_t timestamp;
  _channelCritSect->Enter();
  timestamp = _lastInTimestamp;
  _channelCritSect->Leave();
  return timestamp;
}

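// Average rate since _beginTime. The elapsed time is measured in milliseconds,
// so bytes * 8 divided by that interval gives bits per millisecond, i.e.
// kilobits per second.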
double Channel::BitRate() {
  double rate;
  uint64_t currTime = TickTime::MillisecondTimestamp();
  _channelCritSect->Enter();
  rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
  _channelCritSect->Leave();
  return rate;
}

}  // namespace webrtc