/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/test/Channel.h"

#include <assert.h>
#include <iostream>

#include "webrtc/base/format_macros.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"

namespace webrtc {

int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WebRtcRTPHeader rtpInfo;
  int32_t status;
  size_t payloadDataSize = payloadSize;

  rtpInfo.header.markerBit = false;
  rtpInfo.header.ssrc = 0;
  rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
      _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
  rtpInfo.header.payloadType = payloadType;
  rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
      static_cast<uint32_t>(external_send_timestamp_);

  if (frameType == kAudioFrameCN) {
    rtpInfo.type.Audio.isCNG = true;
  } else {
    rtpInfo.type.Audio.isCNG = false;
  }
  if (frameType == kEmptyFrame) {
    // When the frame is empty, we should not transmit it. The frame size of
    // the next non-empty frame will be based on the previous frame size.
    _useLastFrameSize = _lastFrameSizeSample > 0;
    return 0;
  }

  rtpInfo.type.Audio.channel = 1;
  // Treat fragmentation separately
  if (fragmentation != NULL) {
    // If silence for too long, send only new data.
    if ((fragmentation->fragmentationVectorSize == 2) &&
        (fragmentation->fragmentationTimeDiff[1] <= 0x3fff)) {
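      // Build a 5-byte RED prefix (RFC 2198 style): byte 0 carries the F bit
      // plus the redundant block's payload type, bytes 1-3 pack the 14-bit
      // timestamp offset and the 10-bit block length, and byte 4 is the
      // primary block header (payload type only, F bit cleared).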
      // only 0x80 if we have multiple blocks
      _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
      size_t REDheader = (fragmentation->fragmentationTimeDiff[1] << 10) +
          fragmentation->fragmentationLength[1];
      _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
      _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
      _payloadData[3] = uint8_t(REDheader & 0x000000FF);

      _payloadData[4] = fragmentation->fragmentationPlType[0];
      // copy the RED data
      memcpy(_payloadData + 5,
             payloadData + fragmentation->fragmentationOffset[1],
             fragmentation->fragmentationLength[1]);
      // copy the normal data
      memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
             payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize += 5;
    } else {
      // single block (newest one)
      memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize = fragmentation->fragmentationLength[0];
      rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
    }
  } else {
    memcpy(_payloadData, payloadData, payloadDataSize);
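    // Stereo test mode: alternate calls are treated as left- and right-channel
    // packets; the RTP header built for the left channel is saved and reused
    // for the matching right-channel packet.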
    if (_isStereo) {
      if (_leftChannel) {
        memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = false;
        rtpInfo.type.Audio.channel = 1;
      } else {
        memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = true;
        rtpInfo.type.Audio.channel = 2;
      }
    }
  }

  _channelCritSect->Enter();
  if (_saveBitStream) {
    //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
  }

  if (!_isStereo) {
    CalcStatistics(rtpInfo, payloadSize);
  }
  _useLastFrameSize = false;
  _lastInTimestamp = timeStamp;
  _totalBytes += payloadDataSize;
  _channelCritSect->Leave();

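  // When the FEC packet-loss test is enabled, drop every third packet before
  // it reaches the receiving ACM.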
  if (_useFECTestWithPacketLoss) {
    _packetLoss += 1;
    if (_packetLoss == 3) {
      _packetLoss = 0;
      return 0;
    }
  }

  if (num_packets_to_drop_ > 0) {
    num_packets_to_drop_--;
    return 0;
  }

  status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);

  return status;
}

// TODO(turajs): rewrite this method.
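// Accumulates per-payload-type and per-frame-size statistics for the sent
// stream: packet counts, total payload bytes, total encoded samples, and the
// largest payload seen for each frame size.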
void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize) {
  int n;
  if ((rtpInfo.header.payloadType != _lastPayloadType)
      && (_lastPayloadType != -1)) {
    // The payload type has changed. Terminate the calculations for the
    // previous payload type; we ignore its last packet just to make things
    // easier.
    for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
      if (_lastPayloadType == _payloadStats[n].payloadType) {
        _payloadStats[n].newPacket = true;
        break;
      }
    }
  }
  _lastPayloadType = rtpInfo.header.payloadType;

  bool newPayload = true;
  ACMTestPayloadStats* currentPayloadStr = NULL;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
      newPayload = false;
      currentPayloadStr = &_payloadStats[n];
      break;
    }
  }

  if (!newPayload) {
    if (!currentPayloadStr->newPacket) {
      if (!_useLastFrameSize) {
        _lastFrameSizeSample = (uint32_t) ((uint32_t) rtpInfo.header.timestamp -
            (uint32_t) currentPayloadStr->lastTimestamp);
      }
      assert(_lastFrameSizeSample > 0);
      int k = 0;
      for (; k < MAX_NUM_FRAMESIZES; ++k) {
        if ((currentPayloadStr->frameSizeStats[k].frameSizeSample ==
            _lastFrameSizeSample) ||
            (currentPayloadStr->frameSizeStats[k].frameSizeSample == 0)) {
          break;
        }
      }
      if (k == MAX_NUM_FRAMESIZES) {
        // New frame size found but no space to count statistics on it. Skip it.
        printf("No memory to store statistics for payload %d : frame size %u\n",
               _lastPayloadType, _lastFrameSizeSample);
        return;
      }
      ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
          ->frameSizeStats[k]);
      currentFrameSizeStats->frameSizeSample = (int16_t) _lastFrameSizeSample;

      // Increment the number of encoded samples.
      currentFrameSizeStats->totalEncodedSamples += _lastFrameSizeSample;
      // Increment the number of received packets.
      currentFrameSizeStats->numPackets++;
      // Increment the total number of bytes (this is based on the previous
      // payload since we don't know the frame size of the current payload).
      currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
          ->lastPayloadLenByte;
      // Store the maximum payload size (this is based on the previous
      // payload since we don't know the frame size of the current payload).
      if (currentFrameSizeStats->maxPayloadLen
          < currentPayloadStr->lastPayloadLenByte) {
        currentFrameSizeStats->maxPayloadLen = currentPayloadStr
            ->lastPayloadLenByte;
      }
      // Store the current values for the next time.
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
    } else {
      currentPayloadStr->newPacket = false;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->payloadType = rtpInfo.header.payloadType;
      memset(currentPayloadStr->frameSizeStats, 0, MAX_NUM_FRAMESIZES *
             sizeof(ACMTestFrameSizeStats));
    }
  } else {
    n = 0;
    while (_payloadStats[n].payloadType != -1) {
      n++;
    }
    // First packet of this payload type.
    _payloadStats[n].newPacket = false;
    _payloadStats[n].lastPayloadLenByte = payloadSize;
    _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
    _payloadStats[n].payloadType = rtpInfo.header.payloadType;
    memset(_payloadStats[n].frameSizeStats, 0, MAX_NUM_FRAMESIZES *
           sizeof(ACMTestFrameSizeStats));
  }
}

Channel::Channel(int16_t chID)
    : _receiverACM(NULL),
      _seqNo(0),
      _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _bitStreamFile(NULL),
      _saveBitStream(false),
      _lastPayloadType(-1),
      _isStereo(false),
      _leftChannel(true),
      _lastInTimestamp(0),
      _useLastFrameSize(false),
      _lastFrameSizeSample(0),
      _packetLoss(0),
      _useFECTestWithPacketLoss(false),
      _beginTime(TickTime::MillisecondTimestamp()),
      _totalBytes(0),
      external_send_timestamp_(-1),
      external_sequence_number_(-1),
      num_packets_to_drop_(0) {
  int n;
  int k;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
  if (chID >= 0) {
    _saveBitStream = true;
    char bitStreamFileName[500];
    sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
    _bitStreamFile = fopen(bitStreamFileName, "wb");
  } else {
    _saveBitStream = false;
  }
}

Channel::~Channel() {
  delete _channelCritSect;
}

void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
  _receiverACM = acm;
  return;
}

void Channel::ResetStats() {
  int n;
  int k;
  _channelCritSect->Enter();
  _lastPayloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
  _beginTime = TickTime::MillisecondTimestamp();
  _totalBytes = 0;
  _channelCritSect->Leave();
}

int16_t Channel::Stats(CodecInst& codecInst,
                       ACMTestPayloadStats& payloadStats) {
  _channelCritSect->Enter();
  int n;
  payloadStats.payloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (_payloadStats[n].payloadType == codecInst.pltype) {
      memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
      break;
    }
  }
  if (payloadStats.payloadType == -1) {
    _channelCritSect->Leave();
    return -1;
  }
  for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
    if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
      _channelCritSect->Leave();
      return 0;
    }
    payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
        .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;

    payloadStats.frameSizeStats[n].rateBitPerSec =
        payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
            / payloadStats.frameSizeStats[n].usageLenSec;
  }
  _channelCritSect->Leave();
  return 0;
}

void Channel::Stats(uint32_t* numPackets) {
  _channelCritSect->Enter();
  int k;
  int n;
  memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    numPackets[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
    }
  }
  _channelCritSect->Leave();
}

void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
  _channelCritSect->Enter();

  int k;
  int n;
  memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
    payloadLenByte[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
          .totalPayloadLenByte;
    }
  }

  _channelCritSect->Leave();
}

void Channel::PrintStats(CodecInst& codecInst) {
  ACMTestPayloadStats payloadStats;
  Stats(codecInst, payloadStats);
  printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
  printf("=====================================================\n");
  if (payloadStats.payloadType == -1) {
    printf("No Packets are sent with payload-type %d (%s)\n\n",
           codecInst.pltype, codecInst.plname);
    return;
  }
  for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
    if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
      break;
    }
    printf("Frame-size.................... %d samples\n",
           payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Average Rate.................. %.0f bits/sec\n",
           payloadStats.frameSizeStats[k].rateBitPerSec);
    printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n",
           payloadStats.frameSizeStats[k].maxPayloadLen);
    printf(
        "Maximum Instantaneous Rate.... %.0f bits/sec\n",
        ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
            * (double) codecInst.plfreq)
            / (double) payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Number of Packets............. %u\n",
           (unsigned int) payloadStats.frameSizeStats[k].numPackets);
    printf("Duration...................... %0.3f sec\n\n",
           payloadStats.frameSizeStats[k].usageLenSec);
  }
}

uint32_t Channel::LastInTimestamp() {
  uint32_t timestamp;
  _channelCritSect->Enter();
  timestamp = _lastInTimestamp;
  _channelCritSect->Leave();
  return timestamp;
}

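// Average send rate since _beginTime: total payload bytes * 8 divided by the
// elapsed time in milliseconds, i.e. the result is in kbits/sec.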
double Channel::BitRate() {
  double rate;
  uint64_t currTime = TickTime::MillisecondTimestamp();
  _channelCritSect->Enter();
  rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
  _channelCritSect->Leave();
  return rate;
}

}  // namespace webrtc