/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <iostream>

#include "audio_coding_module.h"
#include "Channel.h"
#include "tick_util.h"
#include "typedefs.h"
#include "common_types.h"

namespace webrtc {

// Transport callback invoked by the sending ACM for every encoded frame.
// Builds an RTP header, optionally re-packs RED (redundant) data when a
// fragmentation header is supplied, mirrors headers between left/right
// channels in stereo mode, updates per-channel statistics, and forwards
// the packet to the registered receiver ACM.
// Returns the receiver's IncomingPacket() status, or 0 when the frame is
// dropped (empty frames; every third packet in FEC packet-loss test mode).
WebRtc_Word32
Channel::SendData(
    const FrameType frameType,
    const WebRtc_UWord8 payloadType,
    const WebRtc_UWord32 timeStamp,
    const WebRtc_UWord8* payloadData,
    const WebRtc_UWord16 payloadSize,
    const RTPFragmentationHeader* fragmentation)
{
    WebRtcRTPHeader rtpInfo;
    WebRtc_Word32 status;
    WebRtc_UWord16 payloadDataSize = payloadSize;

    rtpInfo.header.markerBit = false;
    rtpInfo.header.ssrc = 0;
    rtpInfo.header.sequenceNumber = _seqNo++;
    rtpInfo.header.payloadType = payloadType;
    rtpInfo.header.timestamp = timeStamp;
    // Flag comfort-noise frames so the receiver treats them as CNG.
    if(frameType == kAudioFrameCN)
    {
        rtpInfo.type.Audio.isCNG = true;
    }
    else
    {
        rtpInfo.type.Audio.isCNG = false;
    }
    if(frameType == kFrameEmpty)
    {
        // Skip this frame
        return 0;
    }

    rtpInfo.type.Audio.channel = 1;
    // Treat fragmentation separately
    if(fragmentation != NULL)
    {
        // Two fragments and a representable timestamp offset (RED header
        // has a 14-bit offset field): build a RED packet containing the
        // redundant block followed by the primary block.
        if((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) && // silence for too long send only new data
           (fragmentation->fragmentationVectorSize == 2))
        {
            // only 0x80 if we have multiple blocks
            _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
            // RED block header: 14-bit timestamp offset followed by the
            // 10-bit block length, packed into 3 bytes (big-endian).
            WebRtc_UWord32 REDheader = (((WebRtc_UWord32)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
            _payloadData[1] = WebRtc_UWord8((REDheader >> 16) & 0x000000FF);
            _payloadData[2] = WebRtc_UWord8((REDheader >> 8) & 0x000000FF);
            _payloadData[3] = WebRtc_UWord8(REDheader & 0x000000FF);

            // Final (primary) block header: payload type only, top bit clear.
            _payloadData[4] = fragmentation->fragmentationPlType[0];
            // copy the RED data
            memcpy(_payloadData + 5,
                   payloadData + fragmentation->fragmentationOffset[1],
                   fragmentation->fragmentationLength[1]);
            // copy the normal data
            memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
                   payloadData + fragmentation->fragmentationOffset[0],
                   fragmentation->fragmentationLength[0]);
            // Account for the 5 header bytes added above.
            payloadDataSize += 5;
        } else
        {
            // single block (newest one)
            memcpy(_payloadData,
                   payloadData + fragmentation->fragmentationOffset[0],
                   fragmentation->fragmentationLength[0]);
            payloadDataSize = WebRtc_UWord16(fragmentation->fragmentationLength[0]);
            rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
        }
    }
    else
    {
        memcpy(_payloadData, payloadData, payloadDataSize);
        if(_isStereo)
        {
            // Left and right channels arrive as alternating calls; save the
            // left channel's header and replay it for the right channel so
            // both share sequence number and timestamp.
            if(_leftChannel)
            {
                memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
                _leftChannel = false;
                rtpInfo.type.Audio.channel = 1;
            }
            else
            {
                memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
                _leftChannel = true;
                rtpInfo.type.Audio.channel = 2;
            }
        }
    }

    _channelCritSect->Enter();
    if(_saveBitStream)
    {
        // Bit-stream dumping is currently disabled.
        //fwrite(payloadData, sizeof(WebRtc_UWord8), payloadSize, _bitStreamFile);
    }

    // Statistics are only maintained for mono; stereo calls alternate
    // between channels and would corrupt the per-payload bookkeeping.
    if(!_isStereo)
    {
        CalcStatistics(rtpInfo, payloadSize);
    }
    _lastInTimestamp = timeStamp;
    _totalBytes += payloadDataSize;
    _channelCritSect->Leave();

    // FEC test mode: deliberately drop every third packet.
    if(_useFECTestWithPacketLoss)
    {
        _packetLoss += 1;
        if(_packetLoss == 3)
        {
            _packetLoss = 0;
            return 0;
        }
    }

    status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize,
                                          rtpInfo);

    return status;
}
137
138void
139Channel::CalcStatistics(
140 WebRtcRTPHeader& rtpInfo,
141 WebRtc_UWord16 payloadSize)
142{
143 int n;
144 if((rtpInfo.header.payloadType != _lastPayloadType) &&
145 (_lastPayloadType != -1))
146 {
147 // payload-type is changed.
148 // we have to terminate the calculations on the previous payload type
149 // we ignore the last packet in that payload type just to make things
150 // easier.
151 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
152 {
153 if(_lastPayloadType == _payloadStats[n].payloadType)
154 {
155 _payloadStats[n].newPacket = true;
156 break;
157 }
158 }
159 }
160 _lastPayloadType = rtpInfo.header.payloadType;
161
162 bool newPayload = true;
163 ACMTestPayloadStats* currentPayloadStr;
164 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
165 {
166 if(rtpInfo.header.payloadType == _payloadStats[n].payloadType)
167 {
168 newPayload = false;
169 currentPayloadStr = &_payloadStats[n];
170 break;
171 }
172 }
173
174 if(!newPayload)
175 {
176 if(!currentPayloadStr->newPacket)
177 {
178 WebRtc_UWord32 lastFrameSizeSample = (WebRtc_UWord32)((WebRtc_UWord32)rtpInfo.header.timestamp -
179 (WebRtc_UWord32)currentPayloadStr->lastTimestamp);
180 assert(lastFrameSizeSample > 0);
181 int k = 0;
182 while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
183 lastFrameSizeSample) &&
184 (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
185 {
186 k++;
187 }
188 ACMTestFrameSizeStats* currentFrameSizeStats =
189 &(currentPayloadStr->frameSizeStats[k]);
190 currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;
191
192 // increment the number of encoded samples.
193 currentFrameSizeStats->totalEncodedSamples +=
194 lastFrameSizeSample;
195 // increment the number of recveived packets
196 currentFrameSizeStats->numPackets++;
197 // increment the total number of bytes (this is based on
198 // the previous payload we don't know the frame-size of
199 // the current payload.
200 currentFrameSizeStats->totalPayloadLenByte +=
201 currentPayloadStr->lastPayloadLenByte;
202 // store the maximum payload-size (this is based on
203 // the previous payload we don't know the frame-size of
204 // the current payload.
205 if(currentFrameSizeStats->maxPayloadLen <
206 currentPayloadStr->lastPayloadLenByte)
207 {
208 currentFrameSizeStats->maxPayloadLen =
209 currentPayloadStr->lastPayloadLenByte;
210 }
211 // store the current values for the next time
212 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
213 currentPayloadStr->lastPayloadLenByte = payloadSize;
214 }
215 else
216 {
217 currentPayloadStr->newPacket = false;
218 currentPayloadStr->lastPayloadLenByte = payloadSize;
219 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
220 currentPayloadStr->payloadType = rtpInfo.header.payloadType;
221 }
222 }
223 else
224 {
225 n = 0;
226 while(_payloadStats[n].payloadType != -1)
227 {
228 n++;
229 }
230 // first packet
231 _payloadStats[n].newPacket = false;
232 _payloadStats[n].lastPayloadLenByte = payloadSize;
233 _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
234 _payloadStats[n].payloadType = rtpInfo.header.payloadType;
235 }
236}
237
238Channel::Channel(WebRtc_Word16 chID) :
239_receiverACM(NULL),
240_seqNo(0),
241_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
242_bitStreamFile(NULL),
243_saveBitStream(false),
244_lastPayloadType(-1),
245_isStereo(false),
246_leftChannel(true),
niklase@google.com470e71d2011-07-07 08:21:25 +0000247_lastInTimestamp(0),
tina.legrand@webrtc.org2e096922011-08-18 06:20:30 +0000248_packetLoss(0),
249_useFECTestWithPacketLoss(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000250_chID(chID),
251_beginTime(TickTime::MillisecondTimestamp()),
252_totalBytes(0)
253{
254 int n;
255 int k;
256 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
257 {
258 _payloadStats[n].payloadType = -1;
259 _payloadStats[n].newPacket = true;
260 for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
261 {
262 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
263 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
264 _payloadStats[n].frameSizeStats[k].numPackets = 0;
265 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
266 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
267 }
268 }
269 if(chID >= 0)
270 {
271 _saveBitStream = true;
272 char bitStreamFileName[500];
273 sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
274 _bitStreamFile = fopen(bitStreamFileName, "wb");
275 }
276 else
277 {
278 _saveBitStream = false;
279 }
280}
281
282Channel::~Channel()
283{
284 delete _channelCritSect;
285}
286
287void
288Channel::RegisterReceiverACM(AudioCodingModule* acm)
289{
290 _receiverACM = acm;
291 return;
292}
293
294void
295Channel::ResetStats()
296{
297 int n;
298 int k;
299 _channelCritSect->Enter();
300 _lastPayloadType = -1;
301 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
302 {
303 _payloadStats[n].payloadType = -1;
304 _payloadStats[n].newPacket = true;
305 for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
306 {
307 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
308 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
309 _payloadStats[n].frameSizeStats[k].numPackets = 0;
310 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
311 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
312 }
313 }
314 _beginTime = TickTime::MillisecondTimestamp();
315 _totalBytes = 0;
316 _channelCritSect->Leave();
317}
318
319WebRtc_Word16
320Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
321{
322 _channelCritSect->Enter();
323 int n;
324 payloadStats.payloadType = -1;
325 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
326 {
327 if(_payloadStats[n].payloadType == codecInst.pltype)
328 {
329 memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
330 break;
331 }
332 }
333 if(payloadStats.payloadType == -1)
334 {
335 _channelCritSect->Leave();
336 return -1;
337 }
338 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
339 {
340 if(payloadStats.frameSizeStats[n].frameSizeSample == 0)
341 {
342 _channelCritSect->Leave();
343 return 0;
344 }
345 payloadStats.frameSizeStats[n].usageLenSec =
346 (double)payloadStats.frameSizeStats[n].totalEncodedSamples
347 / (double)codecInst.plfreq;
348
349 payloadStats.frameSizeStats[n].rateBitPerSec =
350 payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 /
351 payloadStats.frameSizeStats[n].usageLenSec;
352
353 }
354 _channelCritSect->Leave();
355 return 0;
356}
357
358void
359Channel::Stats(WebRtc_UWord32* numPackets)
360{
361 _channelCritSect->Enter();
362 int k;
363 int n;
364 memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
365 for(k = 0; k < MAX_NUM_PAYLOADS; k++)
366 {
367 if(_payloadStats[k].payloadType == -1)
368 {
369 break;
370 }
371 numPackets[k] = 0;
372 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
373 {
374 if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
375 {
376 break;
377 }
378 numPackets[k] +=
379 _payloadStats[k].frameSizeStats[n].numPackets;
380 }
381 }
382 _channelCritSect->Leave();
383}
384
385void
386Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
387{
388 _channelCritSect->Enter();
389
390 int k;
391 int n;
392 memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
393 for(k = 0; k < MAX_NUM_PAYLOADS; k++)
394 {
395 if(_payloadStats[k].payloadType == -1)
396 {
397 break;
398 }
399 payloadType[k] = (WebRtc_UWord8)_payloadStats[k].payloadType;
400 payloadLenByte[k] = 0;
401 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
402 {
403 if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
404 {
405 break;
406 }
407 payloadLenByte[k] += (WebRtc_UWord16)
408 _payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
409 }
410 }
411
412 _channelCritSect->Leave();
413}
414
415
416void
417Channel::PrintStats(CodecInst& codecInst)
418{
419 ACMTestPayloadStats payloadStats;
420 Stats(codecInst, payloadStats);
421 printf("%s %d kHz\n",
422 codecInst.plname,
423 codecInst.plfreq / 1000);
424 printf("=====================================================\n");
425 if(payloadStats.payloadType == -1)
426 {
427 printf("No Packets are sent with payload-type %d (%s)\n\n",
428 codecInst.pltype,
429 codecInst.plname);
430 return;
431 }
432 for(int k = 0; k < MAX_NUM_FRAMESIZES; k++)
433 {
434 if(payloadStats.frameSizeStats[k].frameSizeSample == 0)
435 {
436 break;
437 }
438 printf("Frame-size.................... %d samples\n",
439 payloadStats.frameSizeStats[k].frameSizeSample);
440 printf("Average Rate.................. %.0f bits/sec\n",
441 payloadStats.frameSizeStats[k].rateBitPerSec);
442 printf("Maximum Payload-Size.......... %d Bytes\n",
443 payloadStats.frameSizeStats[k].maxPayloadLen);
444 printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
445 ((double)payloadStats.frameSizeStats[k].maxPayloadLen * 8.0 *
446 (double)codecInst.plfreq) /
447 (double)payloadStats.frameSizeStats[k].frameSizeSample);
448 printf("Number of Packets............. %u\n",
449 (unsigned int)payloadStats.frameSizeStats[k].numPackets);
450 printf("Duration...................... %0.3f sec\n\n",
451 payloadStats.frameSizeStats[k].usageLenSec);
452
453 }
454
455}
456
457WebRtc_UWord32
458Channel::LastInTimestamp()
459{
460 WebRtc_UWord32 timestamp;
461 _channelCritSect->Enter();
462 timestamp = _lastInTimestamp;
463 _channelCritSect->Leave();
464 return timestamp;
465}
466
467double
468Channel::BitRate()
469{
470 double rate;
471 WebRtc_UWord64 currTime = TickTime::MillisecondTimestamp();
472 _channelCritSect->Enter();
473 rate = ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
474 _channelCritSect->Leave();
475 return rate;
476}
tina.legrand@webrtc.org554ae1a2011-12-16 10:09:04 +0000477
478} // namespace webrtc