blob: 2e097f74df96d20bbf49333c06fe460f91c8428f [file] [log] [blame]
niklase@google.com470e71d2011-07-07 08:21:25 +00001/*
tina.legrand@webrtc.org16b6b902012-04-12 11:02:38 +00002 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
niklase@google.com470e71d2011-07-07 08:21:25 +00003 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11#include <assert.h>
12#include <iostream>
13
14#include "audio_coding_module.h"
15#include "Channel.h"
16#include "tick_util.h"
17#include "typedefs.h"
18#include "common_types.h"
19
tina.legrand@webrtc.org554ae1a2011-12-16 10:09:04 +000020namespace webrtc {
niklase@google.com470e71d2011-07-07 08:21:25 +000021
// Callback invoked by the sending ACM with one encoded frame.
// Builds a WebRtcRTPHeader for the frame, optionally packs a RED
// (redundant) payload when |fragmentation| is provided, updates the
// channel statistics, and feeds the packet straight into the receiving
// ACM (_receiverACM->IncomingPacket).
//
// Returns 0 when the frame is skipped (empty frame, or a simulated
// packet loss), otherwise the status of IncomingPacket().
int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
                          const uint32_t timeStamp, const uint8_t* payloadData,
                          const uint16_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WebRtcRTPHeader rtpInfo;
  int32_t status;
  uint16_t payloadDataSize = payloadSize;

  // Fill in the header fields this test path needs; other header fields
  // are left untouched.
  rtpInfo.header.markerBit = false;
  rtpInfo.header.ssrc = 0;
  rtpInfo.header.sequenceNumber = _seqNo++;
  rtpInfo.header.payloadType = payloadType;
  rtpInfo.header.timestamp = timeStamp;
  if (frameType == kAudioFrameCN) {
    rtpInfo.type.Audio.isCNG = true;
  } else {
    rtpInfo.type.Audio.isCNG = false;
  }
  if (frameType == kFrameEmpty) {
    // Skip this frame
    return 0;
  }

  rtpInfo.type.Audio.channel = 1;
  // Treat fragmentation separately
  if (fragmentation != NULL) {
    // If silence for too long, send only new data.
    // Two fragments with a representable time offset (<= 14 bits) are
    // packed into a single RED payload: a 4-byte RED header for the
    // redundant block, a 1-byte header for the primary block, then the
    // redundant data followed by the primary data.
    if ((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) &&
        (fragmentation->fragmentationVectorSize == 2)) {
      // only 0x80 if we have multiple blocks
      _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
      // RED header word: 14-bit timestamp offset followed by the 10-bit
      // block length of the redundant fragment.
      uint32_t REDheader = (((uint32_t) fragmentation->fragmentationTimeDiff[1])
          << 10) + fragmentation->fragmentationLength[1];
      _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
      _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
      _payloadData[3] = uint8_t(REDheader & 0x000000FF);

      _payloadData[4] = fragmentation->fragmentationPlType[0];
      // copy the RED data
      memcpy(_payloadData + 5,
             payloadData + fragmentation->fragmentationOffset[1],
             fragmentation->fragmentationLength[1]);
      // copy the normal data
      memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
             payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      // Account for the 4+1 bytes of RED headers prepended above.
      payloadDataSize += 5;
    } else {
      // single block (newest one)
      memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
      rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
    }
  } else {
    memcpy(_payloadData, payloadData, payloadDataSize);
    if (_isStereo) {
      // Stereo is delivered as two calls (left then right). The left
      // call stores its header in _rtpInfo; the right call restores it
      // so both channels share the same sequence number and timestamp.
      if (_leftChannel) {
        memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = false;
        rtpInfo.type.Audio.channel = 1;
      } else {
        memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = true;
        rtpInfo.type.Audio.channel = 2;
      }
    }
  }

  _channelCritSect->Enter();
  if (_saveBitStream) {
    //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
  }

  // Statistics are only tracked for mono; CalcStatistics relies on the
  // caller holding _channelCritSect (taken just above).
  if (!_isStereo) {
    CalcStatistics(rtpInfo, payloadSize);
  }
  _lastInTimestamp = timeStamp;
  _totalBytes += payloadDataSize;
  _channelCritSect->Leave();

  // Optional FEC test mode: deterministically drop every third packet.
  if (_useFECTestWithPacketLoss) {
    _packetLoss += 1;
    if (_packetLoss == 3) {
      _packetLoss = 0;
      return 0;
    }
  }

  status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);

  return status;
}
115
116void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize) {
117 int n;
118 if ((rtpInfo.header.payloadType != _lastPayloadType)
119 && (_lastPayloadType != -1)) {
120 // payload-type is changed.
121 // we have to terminate the calculations on the previous payload type
122 // we ignore the last packet in that payload type just to make things
123 // easier.
124 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
125 if (_lastPayloadType == _payloadStats[n].payloadType) {
126 _payloadStats[n].newPacket = true;
127 break;
128 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000129 }
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000130 }
131 _lastPayloadType = rtpInfo.header.payloadType;
niklase@google.com470e71d2011-07-07 08:21:25 +0000132
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000133 bool newPayload = true;
134 ACMTestPayloadStats* currentPayloadStr = NULL;
135 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
136 if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
137 newPayload = false;
138 currentPayloadStr = &_payloadStats[n];
139 break;
140 }
141 }
142
143 if (!newPayload) {
144 if (!currentPayloadStr->newPacket) {
145 uint32_t lastFrameSizeSample = (uint32_t)(
146 (uint32_t) rtpInfo.header.timestamp
147 - (uint32_t) currentPayloadStr->lastTimestamp);
148 assert(lastFrameSizeSample > 0);
149 int k = 0;
150 while ((currentPayloadStr->frameSizeStats[k].frameSizeSample
151 != lastFrameSizeSample)
152 && (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0)) {
153 k++;
154 }
155 ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
156 ->frameSizeStats[k]);
157 currentFrameSizeStats->frameSizeSample = (int16_t) lastFrameSizeSample;
158
159 // increment the number of encoded samples.
160 currentFrameSizeStats->totalEncodedSamples += lastFrameSizeSample;
161 // increment the number of recveived packets
162 currentFrameSizeStats->numPackets++;
163 // increment the total number of bytes (this is based on
164 // the previous payload we don't know the frame-size of
165 // the current payload.
166 currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
167 ->lastPayloadLenByte;
168 // store the maximum payload-size (this is based on
169 // the previous payload we don't know the frame-size of
170 // the current payload.
171 if (currentFrameSizeStats->maxPayloadLen
172 < currentPayloadStr->lastPayloadLenByte) {
173 currentFrameSizeStats->maxPayloadLen = currentPayloadStr
174 ->lastPayloadLenByte;
175 }
176 // store the current values for the next time
177 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
178 currentPayloadStr->lastPayloadLenByte = payloadSize;
179 } else {
180 currentPayloadStr->newPacket = false;
181 currentPayloadStr->lastPayloadLenByte = payloadSize;
182 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
183 currentPayloadStr->payloadType = rtpInfo.header.payloadType;
184 }
185 } else {
186 n = 0;
187 while (_payloadStats[n].payloadType != -1) {
188 n++;
189 }
190 // first packet
191 _payloadStats[n].newPacket = false;
192 _payloadStats[n].lastPayloadLenByte = payloadSize;
193 _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
194 _payloadStats[n].payloadType = rtpInfo.header.payloadType;
195 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000196}
197
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000198Channel::Channel(int16_t chID)
199 : _receiverACM(NULL),
200 _seqNo(0),
201 _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
202 _bitStreamFile(NULL),
203 _saveBitStream(false),
204 _lastPayloadType(-1),
205 _isStereo(false),
206 _leftChannel(true),
207 _lastInTimestamp(0),
208 _packetLoss(0),
209 _useFECTestWithPacketLoss(false),
210 _beginTime(TickTime::MillisecondTimestamp()),
211 _totalBytes(0) {
212 int n;
213 int k;
214 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
215 _payloadStats[n].payloadType = -1;
216 _payloadStats[n].newPacket = true;
217 for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
218 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
219 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
220 _payloadStats[n].frameSizeStats[k].numPackets = 0;
221 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
222 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
223 }
224 }
225 if (chID >= 0) {
226 _saveBitStream = true;
227 char bitStreamFileName[500];
228 sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
229 _bitStreamFile = fopen(bitStreamFileName, "wb");
230 } else {
231 _saveBitStream = false;
232 }
233}
234
235Channel::~Channel() {
236 delete _channelCritSect;
237}
238
239void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
240 _receiverACM = acm;
241 return;
242}
243
244void Channel::ResetStats() {
245 int n;
246 int k;
247 _channelCritSect->Enter();
248 _lastPayloadType = -1;
249 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
250 _payloadStats[n].payloadType = -1;
251 _payloadStats[n].newPacket = true;
252 for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
253 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
254 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
255 _payloadStats[n].frameSizeStats[k].numPackets = 0;
256 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
257 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
258 }
259 }
260 _beginTime = TickTime::MillisecondTimestamp();
261 _totalBytes = 0;
262 _channelCritSect->Leave();
263}
264
265int16_t Channel::Stats(CodecInst& codecInst,
266 ACMTestPayloadStats& payloadStats) {
267 _channelCritSect->Enter();
268 int n;
269 payloadStats.payloadType = -1;
270 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
271 if (_payloadStats[n].payloadType == codecInst.pltype) {
272 memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
273 break;
274 }
275 }
276 if (payloadStats.payloadType == -1) {
niklase@google.com470e71d2011-07-07 08:21:25 +0000277 _channelCritSect->Leave();
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000278 return -1;
279 }
280 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
281 if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
282 _channelCritSect->Leave();
283 return 0;
284 }
285 payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
286 .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;
287
288 payloadStats.frameSizeStats[n].rateBitPerSec =
289 payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
290 / payloadStats.frameSizeStats[n].usageLenSec;
291
292 }
293 _channelCritSect->Leave();
294 return 0;
niklase@google.com470e71d2011-07-07 08:21:25 +0000295}
296
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000297void Channel::Stats(uint32_t* numPackets) {
298 _channelCritSect->Enter();
299 int k;
300 int n;
301 memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
302 for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
303 if (_payloadStats[k].payloadType == -1) {
304 break;
305 }
306 numPackets[k] = 0;
307 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
308 if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
309 break;
310 }
311 numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
312 }
313 }
314 _channelCritSect->Leave();
andrew@webrtc.orgd7a71d02012-08-01 01:40:02 +0000315}
tina.legrand@webrtc.org554ae1a2011-12-16 10:09:04 +0000316
tina.legrand@webrtc.orgd5726a12013-05-03 07:34:12 +0000317void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
318 _channelCritSect->Enter();
319
320 int k;
321 int n;
322 memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
323 for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
324 if (_payloadStats[k].payloadType == -1) {
325 break;
326 }
327 payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
328 payloadLenByte[k] = 0;
329 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
330 if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
331 break;
332 }
333 payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
334 .totalPayloadLenByte;
335 }
336 }
337
338 _channelCritSect->Leave();
339}
340
341void Channel::PrintStats(CodecInst& codecInst) {
342 ACMTestPayloadStats payloadStats;
343 Stats(codecInst, payloadStats);
344 printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
345 printf("=====================================================\n");
346 if (payloadStats.payloadType == -1) {
347 printf("No Packets are sent with payload-type %d (%s)\n\n",
348 codecInst.pltype, codecInst.plname);
349 return;
350 }
351 for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
352 if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
353 break;
354 }
355 printf("Frame-size.................... %d samples\n",
356 payloadStats.frameSizeStats[k].frameSizeSample);
357 printf("Average Rate.................. %.0f bits/sec\n",
358 payloadStats.frameSizeStats[k].rateBitPerSec);
359 printf("Maximum Payload-Size.......... %d Bytes\n",
360 payloadStats.frameSizeStats[k].maxPayloadLen);
361 printf(
362 "Maximum Instantaneous Rate.... %.0f bits/sec\n",
363 ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
364 * (double) codecInst.plfreq)
365 / (double) payloadStats.frameSizeStats[k].frameSizeSample);
366 printf("Number of Packets............. %u\n",
367 (unsigned int) payloadStats.frameSizeStats[k].numPackets);
368 printf("Duration...................... %0.3f sec\n\n",
369 payloadStats.frameSizeStats[k].usageLenSec);
370
371 }
372
373}
374
375uint32_t Channel::LastInTimestamp() {
376 uint32_t timestamp;
377 _channelCritSect->Enter();
378 timestamp = _lastInTimestamp;
379 _channelCritSect->Leave();
380 return timestamp;
381}
382
383double Channel::BitRate() {
384 double rate;
385 uint64_t currTime = TickTime::MillisecondTimestamp();
386 _channelCritSect->Enter();
387 rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
388 _channelCritSect->Leave();
389 return rate;
390}
391
392} // namespace webrtc