blob: 363b106078e0ecaf3390098db9254c15fd581dd2 [file] [log] [blame]
/*
 *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
10
#include <assert.h>
#include <stdio.h>
#include <string.h>

#include <iostream>

#include "audio_coding_module.h"
#include "Channel.h"
#include "common_types.h"
#include "tick_util.h"
#include "typedefs.h"
19
tina.legrand@webrtc.org554ae1a2011-12-16 10:09:04 +000020namespace webrtc {
niklase@google.com470e71d2011-07-07 08:21:25 +000021
// AudioPacketizationCallback implementation: invoked by the sending ACM for
// every encoded frame. Builds an RTP header for the payload, optionally packs
// redundant (RED) data, updates statistics, and delivers the packet directly
// to the registered receiving ACM (an in-process "loopback network").
// Returns the receiver ACM's status (0 on success, negative on failure).
WebRtc_Word32
Channel::SendData(
    const FrameType frameType,
    const WebRtc_UWord8 payloadType,
    const WebRtc_UWord32 timeStamp,
    const WebRtc_UWord8* payloadData,
    const WebRtc_UWord16 payloadSize,
    const RTPFragmentationHeader* fragmentation)
{
    WebRtcRTPHeader rtpInfo;
    WebRtc_Word32 status;
    WebRtc_UWord16 payloadDataSize = payloadSize;

    rtpInfo.header.markerBit = false;
    rtpInfo.header.ssrc = 0;
    rtpInfo.header.sequenceNumber = _seqNo++;
    rtpInfo.header.payloadType = payloadType;
    rtpInfo.header.timestamp = timeStamp;
    // Mark comfort-noise frames so the receiver handles them as CNG.
    if(frameType == kAudioFrameCN)
    {
        rtpInfo.type.Audio.isCNG = true;
    }
    else
    {
        rtpInfo.type.Audio.isCNG = false;
    }
    if(frameType == kFrameEmpty)
    {
        // Skip this frame
        return 0;
    }

    rtpInfo.type.Audio.channel = 1;
    // Treat fragmentation separately
    if(fragmentation != NULL)
    {
        // Two fragments whose timestamp delta fits the 14-bit RED offset
        // field: build a redundant (RFC 2198-style) packet holding the older
        // block plus the primary block. Otherwise only the newest block is
        // sent (silence lasted too long for the offset to be representable).
        if((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) && // silence for too long send only new data
           (fragmentation->fragmentationVectorSize == 2))
        {
            // only 0x80 if we have multiple blocks
            _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
            // 24 bits: 14-bit timestamp offset followed by 10-bit block length.
            WebRtc_UWord32 REDheader = (((WebRtc_UWord32)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
            _payloadData[1] = WebRtc_UWord8((REDheader >> 16) & 0x000000FF);
            _payloadData[2] = WebRtc_UWord8((REDheader >> 8) & 0x000000FF);
            _payloadData[3] = WebRtc_UWord8(REDheader & 0x000000FF);

            // Final (primary) block header: payload type only, F bit clear.
            _payloadData[4] = fragmentation->fragmentationPlType[0];
            // copy the RED data
            memcpy(_payloadData + 5,
                   payloadData + fragmentation->fragmentationOffset[1],
                   fragmentation->fragmentationLength[1]);
            // copy the normal data
            memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
                   payloadData + fragmentation->fragmentationOffset[0],
                   fragmentation->fragmentationLength[0]);
            // Account for the 5 header bytes prepended above.
            payloadDataSize += 5;
        } else
        {
            // single block (newest one)
            memcpy(_payloadData,
                   payloadData + fragmentation->fragmentationOffset[0],
                   fragmentation->fragmentationLength[0]);
            payloadDataSize = WebRtc_UWord16(fragmentation->fragmentationLength[0]);
            rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
        }
    }
    else
    {
        memcpy(_payloadData, payloadData, payloadDataSize);
        if(_isStereo)
        {
            // Stereo: the left-channel packet's header is saved and replayed
            // for the matching right-channel packet so both halves carry
            // identical sequence number / timestamp; only the channel
            // indicator (1 = left, 2 = right) differs.
            if(_leftChannel)
            {
                memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
                _leftChannel = false;
                rtpInfo.type.Audio.channel = 1;
            }
            else
            {
                memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
                _leftChannel = true;
                rtpInfo.type.Audio.channel = 2;
            }
        }
    }

    _channelCritSect->Enter();
    if(_saveBitStream)
    {
        //fwrite(payloadData, sizeof(WebRtc_UWord8), payloadSize, _bitStreamFile);
    }

    // Statistics are only tracked for mono; a stereo stream interleaves two
    // channels and would corrupt the per-frame-size timestamp bookkeeping.
    if(!_isStereo)
    {
        CalcStatistics(rtpInfo, payloadSize);
    }
    _lastInTimestamp = timeStamp;
    _totalBytes += payloadDataSize;
    _channelCritSect->Leave();

    // Optional FEC test mode: silently drop every third packet.
    if(_useFECTestWithPacketLoss)
    {
        _packetLoss += 1;
        if(_packetLoss == 3)
        {
            _packetLoss = 0;
            return 0;
        }
    }

    // Deliver the packet straight to the receiving ACM (no real transport).
    //status = _receiverACM->IncomingPayload((WebRtc_Word8*)_payloadData, payloadSize, payloadType, timeStamp);
    status = _receiverACM->IncomingPacket((WebRtc_Word8*)_payloadData, payloadDataSize, rtpInfo);

    //delete [] payloadData;

    return status;
}
142
143void
144Channel::CalcStatistics(
145 WebRtcRTPHeader& rtpInfo,
146 WebRtc_UWord16 payloadSize)
147{
148 int n;
149 if((rtpInfo.header.payloadType != _lastPayloadType) &&
150 (_lastPayloadType != -1))
151 {
152 // payload-type is changed.
153 // we have to terminate the calculations on the previous payload type
154 // we ignore the last packet in that payload type just to make things
155 // easier.
156 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
157 {
158 if(_lastPayloadType == _payloadStats[n].payloadType)
159 {
160 _payloadStats[n].newPacket = true;
161 break;
162 }
163 }
164 }
165 _lastPayloadType = rtpInfo.header.payloadType;
166
167 bool newPayload = true;
168 ACMTestPayloadStats* currentPayloadStr;
169 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
170 {
171 if(rtpInfo.header.payloadType == _payloadStats[n].payloadType)
172 {
173 newPayload = false;
174 currentPayloadStr = &_payloadStats[n];
175 break;
176 }
177 }
178
179 if(!newPayload)
180 {
181 if(!currentPayloadStr->newPacket)
182 {
183 WebRtc_UWord32 lastFrameSizeSample = (WebRtc_UWord32)((WebRtc_UWord32)rtpInfo.header.timestamp -
184 (WebRtc_UWord32)currentPayloadStr->lastTimestamp);
185 assert(lastFrameSizeSample > 0);
186 int k = 0;
187 while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
188 lastFrameSizeSample) &&
189 (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
190 {
191 k++;
192 }
193 ACMTestFrameSizeStats* currentFrameSizeStats =
194 &(currentPayloadStr->frameSizeStats[k]);
195 currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;
196
197 // increment the number of encoded samples.
198 currentFrameSizeStats->totalEncodedSamples +=
199 lastFrameSizeSample;
200 // increment the number of recveived packets
201 currentFrameSizeStats->numPackets++;
202 // increment the total number of bytes (this is based on
203 // the previous payload we don't know the frame-size of
204 // the current payload.
205 currentFrameSizeStats->totalPayloadLenByte +=
206 currentPayloadStr->lastPayloadLenByte;
207 // store the maximum payload-size (this is based on
208 // the previous payload we don't know the frame-size of
209 // the current payload.
210 if(currentFrameSizeStats->maxPayloadLen <
211 currentPayloadStr->lastPayloadLenByte)
212 {
213 currentFrameSizeStats->maxPayloadLen =
214 currentPayloadStr->lastPayloadLenByte;
215 }
216 // store the current values for the next time
217 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
218 currentPayloadStr->lastPayloadLenByte = payloadSize;
219 }
220 else
221 {
222 currentPayloadStr->newPacket = false;
223 currentPayloadStr->lastPayloadLenByte = payloadSize;
224 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
225 currentPayloadStr->payloadType = rtpInfo.header.payloadType;
226 }
227 }
228 else
229 {
230 n = 0;
231 while(_payloadStats[n].payloadType != -1)
232 {
233 n++;
234 }
235 // first packet
236 _payloadStats[n].newPacket = false;
237 _payloadStats[n].lastPayloadLenByte = payloadSize;
238 _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
239 _payloadStats[n].payloadType = rtpInfo.header.payloadType;
240 }
241}
242
243Channel::Channel(WebRtc_Word16 chID) :
244_receiverACM(NULL),
245_seqNo(0),
246_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
247_bitStreamFile(NULL),
248_saveBitStream(false),
249_lastPayloadType(-1),
250_isStereo(false),
251_leftChannel(true),
niklase@google.com470e71d2011-07-07 08:21:25 +0000252_lastInTimestamp(0),
tina.legrand@webrtc.org2e096922011-08-18 06:20:30 +0000253_packetLoss(0),
254_useFECTestWithPacketLoss(false),
niklase@google.com470e71d2011-07-07 08:21:25 +0000255_chID(chID),
256_beginTime(TickTime::MillisecondTimestamp()),
257_totalBytes(0)
258{
259 int n;
260 int k;
261 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
262 {
263 _payloadStats[n].payloadType = -1;
264 _payloadStats[n].newPacket = true;
265 for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
266 {
267 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
268 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
269 _payloadStats[n].frameSizeStats[k].numPackets = 0;
270 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
271 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
272 }
273 }
274 if(chID >= 0)
275 {
276 _saveBitStream = true;
277 char bitStreamFileName[500];
278 sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
279 _bitStreamFile = fopen(bitStreamFileName, "wb");
280 }
281 else
282 {
283 _saveBitStream = false;
284 }
285}
286
287Channel::~Channel()
288{
289 delete _channelCritSect;
290}
291
292void
293Channel::RegisterReceiverACM(AudioCodingModule* acm)
294{
295 _receiverACM = acm;
296 return;
297}
298
299void
300Channel::ResetStats()
301{
302 int n;
303 int k;
304 _channelCritSect->Enter();
305 _lastPayloadType = -1;
306 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
307 {
308 _payloadStats[n].payloadType = -1;
309 _payloadStats[n].newPacket = true;
310 for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
311 {
312 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
313 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
314 _payloadStats[n].frameSizeStats[k].numPackets = 0;
315 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
316 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
317 }
318 }
319 _beginTime = TickTime::MillisecondTimestamp();
320 _totalBytes = 0;
321 _channelCritSect->Leave();
322}
323
324WebRtc_Word16
325Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
326{
327 _channelCritSect->Enter();
328 int n;
329 payloadStats.payloadType = -1;
330 for(n = 0; n < MAX_NUM_PAYLOADS; n++)
331 {
332 if(_payloadStats[n].payloadType == codecInst.pltype)
333 {
334 memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
335 break;
336 }
337 }
338 if(payloadStats.payloadType == -1)
339 {
340 _channelCritSect->Leave();
341 return -1;
342 }
343 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
344 {
345 if(payloadStats.frameSizeStats[n].frameSizeSample == 0)
346 {
347 _channelCritSect->Leave();
348 return 0;
349 }
350 payloadStats.frameSizeStats[n].usageLenSec =
351 (double)payloadStats.frameSizeStats[n].totalEncodedSamples
352 / (double)codecInst.plfreq;
353
354 payloadStats.frameSizeStats[n].rateBitPerSec =
355 payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 /
356 payloadStats.frameSizeStats[n].usageLenSec;
357
358 }
359 _channelCritSect->Leave();
360 return 0;
361}
362
363void
364Channel::Stats(WebRtc_UWord32* numPackets)
365{
366 _channelCritSect->Enter();
367 int k;
368 int n;
369 memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
370 for(k = 0; k < MAX_NUM_PAYLOADS; k++)
371 {
372 if(_payloadStats[k].payloadType == -1)
373 {
374 break;
375 }
376 numPackets[k] = 0;
377 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
378 {
379 if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
380 {
381 break;
382 }
383 numPackets[k] +=
384 _payloadStats[k].frameSizeStats[n].numPackets;
385 }
386 }
387 _channelCritSect->Leave();
388}
389
390void
391Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
392{
393 _channelCritSect->Enter();
394
395 int k;
396 int n;
397 memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
398 for(k = 0; k < MAX_NUM_PAYLOADS; k++)
399 {
400 if(_payloadStats[k].payloadType == -1)
401 {
402 break;
403 }
404 payloadType[k] = (WebRtc_UWord8)_payloadStats[k].payloadType;
405 payloadLenByte[k] = 0;
406 for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
407 {
408 if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
409 {
410 break;
411 }
412 payloadLenByte[k] += (WebRtc_UWord16)
413 _payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
414 }
415 }
416
417 _channelCritSect->Leave();
418}
419
420
421void
422Channel::PrintStats(CodecInst& codecInst)
423{
424 ACMTestPayloadStats payloadStats;
425 Stats(codecInst, payloadStats);
426 printf("%s %d kHz\n",
427 codecInst.plname,
428 codecInst.plfreq / 1000);
429 printf("=====================================================\n");
430 if(payloadStats.payloadType == -1)
431 {
432 printf("No Packets are sent with payload-type %d (%s)\n\n",
433 codecInst.pltype,
434 codecInst.plname);
435 return;
436 }
437 for(int k = 0; k < MAX_NUM_FRAMESIZES; k++)
438 {
439 if(payloadStats.frameSizeStats[k].frameSizeSample == 0)
440 {
441 break;
442 }
443 printf("Frame-size.................... %d samples\n",
444 payloadStats.frameSizeStats[k].frameSizeSample);
445 printf("Average Rate.................. %.0f bits/sec\n",
446 payloadStats.frameSizeStats[k].rateBitPerSec);
447 printf("Maximum Payload-Size.......... %d Bytes\n",
448 payloadStats.frameSizeStats[k].maxPayloadLen);
449 printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
450 ((double)payloadStats.frameSizeStats[k].maxPayloadLen * 8.0 *
451 (double)codecInst.plfreq) /
452 (double)payloadStats.frameSizeStats[k].frameSizeSample);
453 printf("Number of Packets............. %u\n",
454 (unsigned int)payloadStats.frameSizeStats[k].numPackets);
455 printf("Duration...................... %0.3f sec\n\n",
456 payloadStats.frameSizeStats[k].usageLenSec);
457
458 }
459
460}
461
462WebRtc_UWord32
463Channel::LastInTimestamp()
464{
465 WebRtc_UWord32 timestamp;
466 _channelCritSect->Enter();
467 timestamp = _lastInTimestamp;
468 _channelCritSect->Leave();
469 return timestamp;
470}
471
472double
473Channel::BitRate()
474{
475 double rate;
476 WebRtc_UWord64 currTime = TickTime::MillisecondTimestamp();
477 _channelCritSect->Enter();
478 rate = ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
479 _channelCritSect->Leave();
480 return rate;
481}
tina.legrand@webrtc.org554ae1a2011-12-16 10:09:04 +0000482
483} // namespace webrtc