/**
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <AudioRtpPayloadEncoderNode.h>
#include <ImsMediaAudioUtil.h>
#include <ImsMediaTrace.h>
#include <AudioConfig.h>
#include <EvsParams.h>

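// Frame-type index that ImsMediaAudioUtil::ConvertLenToEVSAudioMode() reports for an
// EVS Primary SID frame; used below to decide whether a CMR byte is written in the
// header-full format.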
#define EVS_MODE_SID 12

AudioRtpPayloadEncoderNode::AudioRtpPayloadEncoderNode(BaseSessionCallback* callback) :
        BaseNode(callback)
{
    mCodecType = 0;
    mOctetAligned = false;
    mPtime = 0;
    memset(mPayload, 0, sizeof(mPayload));
    mFirstFrame = false;
    mTimestamp = 0;
    mMaxNumOfFrame = 0;
    mCurrNumOfFrame = 0;
    mCurrFramePos = 0;
    mTotalPayloadSize = 0;
    mEvsBandwidth = kEvsBandwidthNone;
    mEvsCodecMode = kEvsCodecModePrimary;
    mEvsOffset = 0;
    mSendCMR = 0;
    mEvsMode = kEvsAmrIoModeBitrate00660;
    mCoreEvsMode = 0;
    mEvsPayloadHeaderMode = kRtpPayloadHeaderModeEvsCompact;
}

AudioRtpPayloadEncoderNode::~AudioRtpPayloadEncoderNode() {}

kBaseNodeId AudioRtpPayloadEncoderNode::GetNodeId()
{
    return kNodeIdAudioPayloadEncoder;
}

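// Derives how many 20 ms speech frames are bundled into one RTP payload from the
// configured ptime and resolves the negotiated EVS bit rate and codec mode before
// the node starts running.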
ImsMediaResult AudioRtpPayloadEncoderNode::Start()
{
    mMaxNumOfFrame = mPtime / 20;
    mEvsMode = (kEvsBitrate)ImsMediaAudioUtil::GetMaximumEvsMode(mCoreEvsMode);
    mEvsCodecMode = (kEvsCodecMode)ImsMediaAudioUtil::ConvertEvsCodecMode(mEvsMode);

    IMLOGD5("[Start] codecType[%d], mode[%d], num of frames[%d], evs bitrate[%d], evs mode[%d]",
            mCodecType, mOctetAligned, mMaxNumOfFrame, mEvsMode, mEvsCodecMode);

    if (mMaxNumOfFrame == 0 || mMaxNumOfFrame > MAX_FRAME_IN_PACKET)
    {
        IMLOGE1("[Start] Invalid ptime [%d]", mPtime);
        return RESULT_INVALID_PARAM;
    }

    mCurrNumOfFrame = 0;
    mCurrFramePos = 0;
    mFirstFrame = true;
    mTotalPayloadSize = 0;
    mNodeState = kNodeStateRunning;
    return RESULT_SUCCESS;
}

void AudioRtpPayloadEncoderNode::Stop()
{
    IMLOGD0("[Stop]");
    mNodeState = kNodeStateStopped;
}

bool AudioRtpPayloadEncoderNode::IsRunTime()
{
    return true;
}

bool AudioRtpPayloadEncoderNode::IsSourceNode()
{
    return false;
}

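// Routes incoming encoded frames by codec type: AMR/AMR-WB and EVS frames get an RTP
// payload header built around them, while G.711 (PCMU/PCMA) frames are forwarded to
// the next node unchanged.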
void AudioRtpPayloadEncoderNode::OnDataFromFrontNode(ImsMediaSubType /*subtype*/, uint8_t* pData,
        uint32_t nDataSize, uint32_t nTimestamp, bool bMark, uint32_t nSeqNum,
        ImsMediaSubType nDataType, uint32_t arrivalTime)
{
    switch (mCodecType)
    {
        case kAudioCodecAmr:
        case kAudioCodecAmrWb:
            EncodePayloadAmr(pData, nDataSize, nTimestamp, arrivalTime);
            break;
        case kAudioCodecPcmu:
        case kAudioCodecPcma:
            SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, pData, nDataSize, nTimestamp, bMark,
                    nSeqNum, nDataType, arrivalTime);
            break;
        case kAudioCodecEvs:
            EncodePayloadEvs(pData, nDataSize, nTimestamp, arrivalTime);
            break;
        default:
            IMLOGE1("[OnDataFromFrontNode] invalid codec type[%d]", mCodecType);
            SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, pData, nDataSize, nTimestamp, bMark,
                    nSeqNum, nDataType, arrivalTime);
            break;
    }
}

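// Caches the codec-specific payload parameters (AMR octet alignment, or the EVS
// bandwidth, payload header mode, mode, channel-aware offset and CMR setting) and the
// packetization time from the given AudioConfig.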
void AudioRtpPayloadEncoderNode::SetConfig(void* config)
{
    AudioConfig* pConfig = reinterpret_cast<AudioConfig*>(config);

    if (pConfig != nullptr)
    {
        mCodecType = ImsMediaAudioUtil::ConvertCodecType(pConfig->getCodecType());
        if (mCodecType == kAudioCodecAmr || mCodecType == kAudioCodecAmrWb)
        {
            mOctetAligned = pConfig->getAmrParams().getOctetAligned();
        }
        else if (mCodecType == kAudioCodecEvs)
        {
            mEvsBandwidth = (kEvsBandwidth)pConfig->getEvsParams().getEvsBandwidth();
            mEvsPayloadHeaderMode =
                    (kRtpPayloadHeaderMode)pConfig->getEvsParams().getUseHeaderFullOnly();
            mCoreEvsMode = pConfig->getEvsParams().getEvsMode();
            mEvsOffset = pConfig->getEvsParams().getChannelAwareMode();
            mSendCMR = pConfig->getEvsParams().getCodecModeRequest();
        }

        mPtime = pConfig->getPtimeMillis();
    }
}

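// Returns true when the payload-relevant parameters in the given config match the
// node's current state, meaning no reconfiguration is needed.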
bool AudioRtpPayloadEncoderNode::IsSameConfig(void* config)
{
    if (config == nullptr)
        return true;
    AudioConfig* pConfig = reinterpret_cast<AudioConfig*>(config);

    if (mCodecType == ImsMediaAudioUtil::ConvertCodecType(pConfig->getCodecType()))
    {
        if (mCodecType == kAudioCodecAmr || mCodecType == kAudioCodecAmrWb)
        {
            return (mOctetAligned == pConfig->getAmrParams().getOctetAligned());
        }
        else if (mCodecType == kAudioCodecEvs)
        {
            return (mEvsBandwidth == (kEvsBandwidth)pConfig->getEvsParams().getEvsBandwidth() &&
                    mEvsPayloadHeaderMode ==
                            (kRtpPayloadHeaderMode)pConfig->getEvsParams().getUseHeaderFullOnly() &&
                    mCoreEvsMode ==
                            ImsMediaAudioUtil::GetMaximumEvsMode(
                                    pConfig->getEvsParams().getEvsMode()) &&
                    mEvsOffset == pConfig->getEvsParams().getChannelAwareMode());
        }
    }

    return false;
}

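// Packs one AMR or AMR-WB speech frame into the RTP payload being assembled (see
// RFC 4867): a 4-bit CMR at the start of the payload, one ToC entry (F, FT, Q) per
// frame, then the speech bits. In octet-aligned mode the CMR and each ToC entry are
// padded out to full octets; in bandwidth-efficient mode the fields are bit-packed.
// Frames are accumulated until mMaxNumOfFrame frames are collected, then the packet
// is sent to the rear node.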
void AudioRtpPayloadEncoderNode::EncodePayloadAmr(
        uint8_t* pData, uint32_t nDataSize, uint32_t nTimestamp, uint32_t arrivalTime)
{
    uint32_t nCmr = 15;
    uint32_t f, ft, q, nDataBitSize;

    // strip the ToC byte that the encoder prepends to the frame
    pData++;
    nDataSize -= 1;
180
181 if (nDataSize > 4)
182 {
183 IMLOGD_PACKET5(IM_PACKET_LOG_PH, "[EncodePayloadAmr] src = %02X %02X %02X %02X, len[%d]",
184 pData[0], pData[1], pData[2], pData[3], nDataSize);
185 }
186
187 IMLOGD_PACKET2(IM_PACKET_LOG_PH, "[EncodePayloadAmr] codectype[%d], octetAligned[%d]",
188 mCodecType, mOctetAligned);
189
190 mCurrNumOfFrame++;
191 f = (mCurrNumOfFrame == mMaxNumOfFrame) ? 0 : 1;
192
193 if (mCodecType == kAudioCodecAmr)
194 {
195 nCmr = 0x0F;
196 ft = ImsMediaAudioUtil::ConvertLenToAmrMode(nDataSize);
197 nDataBitSize = ImsMediaAudioUtil::ConvertAmrModeToBitLen(ft);
198 }
199 else
200 {
201 nCmr = 0x0F;
202 ft = ImsMediaAudioUtil::ConvertLenToAmrWbMode(nDataSize);
203 nDataBitSize = ImsMediaAudioUtil::ConvertAmrWbModeToBitLen(ft);
204 }
205
206 q = 1;
207
    // the first payload of the packet
    if (mCurrNumOfFrame == 1)
    {
        memset(mPayload, 0, MAX_AUDIO_PAYLOAD_SIZE);
        mBWHeader.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);
        mBWPayload.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);
        mBWHeader.Write(nCmr, 4);

        if (mOctetAligned == true)
        {
            mBWHeader.Write(0, 4);
            mBWPayload.Seek(8 + mMaxNumOfFrame * 8);
        }
        else
        {
            mBWPayload.Seek(4 + mMaxNumOfFrame * 6);
        }

        mTimestamp = nTimestamp;
    }

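    // Both bit writers operate on the same mPayload buffer: mBWHeader appends the
    // per-frame ToC entries right after the CMR written above, while mBWPayload was
    // seeked past the space reserved for the CMR and ToC so the speech bits of every
    // frame land after the table.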
    // Payload ToC
    mBWHeader.Write(f, 1);
    mBWHeader.Write(ft, 4);
    mBWHeader.Write(q, 1);

    if (mOctetAligned == true)
    {
        mBWHeader.AddPadding();
    }

    IMLOGD_PACKET2(IM_PACKET_LOG_PH, "[EncodePayloadAmr] nDataBitSize[%d], nDataSize[%d]",
            nDataBitSize, nDataSize);

    // Speech Frame
    mBWPayload.WriteByteBuffer(pData, nDataBitSize);

    if (mOctetAligned == true)
    {
        mBWPayload.AddPadding();
    }

    mTotalPayloadSize += nDataSize;

    if (mCurrNumOfFrame == mMaxNumOfFrame)
    {
        mBWHeader.Flush();
        mBWPayload.AddPadding();
        mBWPayload.Flush();
        uint32_t nTotalSize = mBWPayload.GetBufferSize();

        IMLOGD_PACKET7(IM_PACKET_LOG_PH,
                "[EncodePayloadAmr] result = %02X %02X %02X %02X %02X %02X, len[%d]", mPayload[0],
                mPayload[1], mPayload[2], mPayload[3], mPayload[4], mPayload[5], nTotalSize);

        if (mTotalPayloadSize > 0)
        {
            SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, mPayload, nTotalSize, mTimestamp,
                    mFirstFrame, 0, MEDIASUBTYPE_UNDEFINED, arrivalTime);
        }

        mCurrNumOfFrame = 0;
        mTotalPayloadSize = 0;

        if (mFirstFrame)
        {
            mFirstFrame = false;
        }
    }
}

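// Packs one EVS frame into an RTP payload following the EVS payload format of
// 3GPP TS 26.445 Annex A. In the compact format a primary-mode frame is sent as bare
// speech bits and an AMR-WB IO frame carries a 3-bit CMR ahead of the speech bits; in
// the header-full format an optional CMR byte and one ToC byte per frame precede the
// speech data, and frames are accumulated until mMaxNumOfFrame frames are collected.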
void AudioRtpPayloadEncoderNode::EncodePayloadEvs(
        uint8_t* pData, uint32_t nDataSize, uint32_t nTimeStamp, uint32_t arrivalTime)
{
    if (nDataSize == 0)
    {
        return;
    }

    uint32_t nFrameType = 0;
    // compact or header-full format; the default is the compact format
    // primary or AMR-WB IO mode, selected from the frame size; the default is primary mode
    mCurrNumOfFrame++;

    if (mEvsPayloadHeaderMode == kRtpPayloadHeaderModeEvsCompact)
    {
        memset(mPayload, 0, MAX_AUDIO_PAYLOAD_SIZE);
        mBWHeader.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);
        mBWPayload.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);

        mTimestamp = nTimeStamp;
        // exactly one coded frame without any additional EVS RTP payload header
        if (mEvsCodecMode == kEvsCodecModePrimary)
        {
            // calculate nDataBitSize from nDataSize
            nFrameType = (uint32_t)ImsMediaAudioUtil::ConvertLenToEVSAudioMode(nDataSize);
            uint32_t nDataBitSize =
                    ImsMediaAudioUtil::ConvertEVSAudioModeToBitLen((kImsAudioEvsMode)nFrameType);

            if (nDataBitSize == 0)
            {
                return;
            }

            // special case, EVS Primary 2.8 kbps frame in Compact format
            if (nFrameType == 0)
            {
                // First data bit d(0) of the EVS Primary 2.8 kbps is always set to '0'
                pData[0] = pData[0] & 0x7f;
            }

            // write speech Frame
            mBWPayload.WriteByteBuffer(pData, nDataBitSize);
            mTotalPayloadSize += nDataSize;

            mBWHeader.AddPadding();
            mBWHeader.Flush();

            mBWPayload.AddPadding();
            mBWPayload.Flush();

            uint32_t nTotalSize = mBWPayload.GetBufferSize();

            IMLOGD_PACKET7(IM_PACKET_LOG_PH, "[EncodePayloadEvs] result =\
                    %02X %02X %02X %02X %02X %02X, len[%d]",
                    mPayload[0], mPayload[1], mPayload[2], mPayload[3], mPayload[4], mPayload[5],
                    nTotalSize);

            if (mTotalPayloadSize > 0)
            {
                SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, mPayload, nTotalSize, mTimestamp,
                        mFirstFrame, 0, MEDIASUBTYPE_UNDEFINED, arrivalTime);
            }

            mCurrNumOfFrame = 0;
            mTotalPayloadSize = 0;
            if (mFirstFrame)
                mFirstFrame = false;
        }
        // one 3-bit CMR field, one coded frame, and zero-padding bits if necessary
        else if (mEvsCodecMode == kEvsCodecModeAmrIo)
        {
            // calculate nDataBitSize from nDataSize
            nFrameType = (uint32_t)ImsMediaAudioUtil::ConvertLenToAmrWbMode(nDataSize);
            uint32_t nDataBitSize = ImsMediaAudioUtil::ConvertAmrWbModeToBitLen(nFrameType);

            // 0: 6.6, 1: 8.85, 2: 12.65, 3: 15.85, 4: 18.25, 5: 23.05, 6: 23.85, 7: none
            // 0111(7) is no request.
            uint32_t nCmr = 0x07;

            // write the CMR except for SID frames
            // in EVS AMR-WB IO mode, a SID packet does not carry the 3-bit CMR field
            if (nFrameType != kImsAudioAmrWbModeSID)
            {
                mBWHeader.Write(nCmr, 3);
                mBWPayload.Seek(3);

                // append a speech data bit(0) after the last speech data bit
                uint8_t nDataBit0 = 0;
                uint32_t i = 0;
                uint32_t remain = 0;

                nDataBit0 = pData[0] >> 7;
                for (i = 0; i < (nDataSize - 1); i++)
                {
                    pData[i] = pData[i] << 1;
                    pData[i] = pData[i] + (pData[i + 1] >> 7);
                }

                // set the last speech data byte
                remain = nDataBitSize % 8;
                if (remain == 0)
                    remain = 8;
                pData[nDataSize - 1] = pData[nDataSize - 1] << 1;
                nDataBit0 = nDataBit0 << (8 - remain);
                pData[nDataSize - 1] = pData[nDataSize - 1] + nDataBit0;
            }
            else // kImsAudioAmrWbModeSID case
            {
                // in EVS AMR-WB IO mode, a SID frame uses the header-full format
                // set the CMR
                nCmr = 0xff; // no request - 0xff
                mBWHeader.Write(nCmr, 8);
                mBWPayload.Seek(8);

                // set ToC
                // Header Type identification bit(1bit) - always set to 0
                uint32_t toc_h = 0;
                // (1bit - always set to 0 in compact AMR WB IO mode)
                uint32_t toc_f = 0;
                // 1 1 1001 - EVS AMR-WB IO mode, Q bit set to 1, 1001 indicates a SID packet
                uint32_t ft = 0x39;

                mBWHeader.Write(toc_h, 1);
                mBWHeader.Write(toc_f, 1);
                mBWHeader.Write(ft, 6);
                mBWPayload.Seek(8);
            }

            // write speech Frame
            mBWPayload.WriteByteBuffer(pData, nDataBitSize);
            mTotalPayloadSize += nDataSize;

            mBWHeader.Flush();

            mBWPayload.AddPadding();
            mBWPayload.Flush();

            uint32_t nTotalSize = mBWPayload.GetBufferSize();

            IMLOGD_PACKET7(IM_PACKET_LOG_PH,
                    "[EncodePayloadEvs] Result = %02X %02X %02X %02X %02X %02X, len[%d]",
                    mPayload[0], mPayload[1], mPayload[2], mPayload[3], mPayload[4], mPayload[5],
                    nTotalSize);

            if (mTotalPayloadSize > 0)
            {
                SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, mPayload, nTotalSize, mTimestamp,
                        mFirstFrame, 0, MEDIASUBTYPE_UNDEFINED, arrivalTime);
            }

            mCurrNumOfFrame = 0;
            mTotalPayloadSize = 0;
            if (mFirstFrame)
                mFirstFrame = false;
        }
        else
        {
            IMLOGE0("[EncodePayloadEvs] Invalid codec mode");
            return;
        }
    }
    else if (mEvsPayloadHeaderMode == kRtpPayloadHeaderModeEvsHeaderFull)
    {
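        // Header-full format: the payload is laid out as an optional CMR byte, one ToC
        // byte per frame, and then the speech frames, each zero-padded to an octet
        // boundary.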
        // bandwidth 111 and bit rate 1111 together mean no request
        uint32_t nEVSBW = 0x07;
        uint32_t nEVSBR = 0x0f;

        uint32_t cmr_h = 0, cmr_t = 0, cmr_d = 0; // CMR byte fields
        memset(mPayload, 0, MAX_AUDIO_PAYLOAD_SIZE);
        mBWHeader.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);
        mBWPayload.SetBuffer(mPayload, MAX_AUDIO_PAYLOAD_SIZE);

        if (mEvsCodecMode == kEvsCodecModePrimary)
        {
            nFrameType = (uint32_t)ImsMediaAudioUtil::ConvertLenToEVSAudioMode(nDataSize);

            if (nFrameType == EVS_MODE_SID || mSendCMR == 1) // CMR value
            {
                // Header Type identification bit(1bit) - always set to 1
                cmr_h = 1;
                // Type of Request(3bits) - NB(000), IO(001), WB(010), SWB(011), FB(100)
                cmr_t = nEVSBW;
                // codec mode request(4bits)
                cmr_d = nEVSBR;
            }

            // set ToC byte
            uint32_t toc_h = 0; // Header Type identification bit(1bit) - always set to 0
            uint32_t toc_f = (mCurrNumOfFrame == mMaxNumOfFrame) ? 0 : 1; // (1bit)
            uint32_t toc_ft_m = 0; // EVS mode(1bit), Primary mode is 0
            uint32_t toc_ft_q = 0; // Q bit(1bit) - zero for kEvsCodecModePrimary
            uint32_t toc_ft_b =
                    ImsMediaAudioUtil::ConvertLenToEVSAudioMode(nDataSize); // EVS bit rate(4bits)
            uint32_t nDataBitSize =
                    ImsMediaAudioUtil::ConvertEVSAudioModeToBitLen((kImsAudioEvsMode)toc_ft_b);

            // write the CMR and seek to the position of the first payload
            if (mCurrNumOfFrame == 1)
            {
                // the CMR byte is optional; write it only when needed
                if (nFrameType == EVS_MODE_SID || mSendCMR == 1)
                {
                    // write the CMR byte
                    mBWHeader.Write(cmr_h, 1);
                    mBWHeader.Write(cmr_t, 3);
                    mBWHeader.Write(cmr_d, 4);

                    mBWPayload.Seek(8);
                }

                // ToC field.
                mBWPayload.Seek(mMaxNumOfFrame * 8); // jump ToC bytes
                mTimestamp = nTimeStamp; // set timestamp as the first frame
            }

            // write ToC
            mBWHeader.Write(toc_h, 1);
            mBWHeader.Write(toc_f, 1);
            mBWHeader.Write(toc_ft_m, 1);
            mBWHeader.Write(toc_ft_q, 1);
            mBWHeader.Write(toc_ft_b, 4);

            // write Speech Frame
            mBWPayload.WriteByteBuffer(pData, nDataBitSize);
            mBWPayload.AddPadding();

            mTotalPayloadSize += nDataSize;

            if (mCurrNumOfFrame == mMaxNumOfFrame)
            {
                // mBWHeader.AddPadding();
                mBWHeader.Flush();

                mBWPayload.AddPadding();
                mBWPayload.Flush();

                uint32_t nTotalSize = mBWPayload.GetBufferSize();
                IMLOGD_PACKET7(IM_PACKET_LOG_PH,
                        "[EncodePayloadEvs] Result = %02X %02X %02X %02X %02X %02X, len[%d]",
                        mPayload[0], mPayload[1], mPayload[2], mPayload[3], mPayload[4],
                        mPayload[5], nTotalSize);

                if (mTotalPayloadSize > 0)
                {
                    SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, mPayload,
                            CheckPaddingNecessity(nTotalSize), mTimestamp, mFirstFrame, 0,
                            MEDIASUBTYPE_UNDEFINED, arrivalTime);
                }

                mCurrNumOfFrame = 0;
                mTotalPayloadSize = 0;
                if (mFirstFrame)
                    mFirstFrame = false;
            }
        }
        else if (mEvsCodecMode == kEvsCodecModeAmrIo)
        {
            // set the CMR byte
            // in EVS AMR-WB IO mode, the CMR field shall be included
            // Header Type identification bit(1bit) - always set to 1
            cmr_h = 1;
            /* Type of Request(3bits) - NB(000), IO(001), WB(010), SWB(011), FB(100), WB 13.2
             * channel-aware(101), SWB 13.2 channel-aware(110), reserved(111) */
            cmr_t = nEVSBW;
            // codec mode request(4bits) - 1111 is no request
            cmr_d = nEVSBR;

            // set ToC byte
            // Header Type identification bit(1bit) - always set to 0
            uint32_t toc_h = 0;
            // (1bit)
            uint32_t toc_f = (mCurrNumOfFrame == mMaxNumOfFrame) ? 0 : 1;
            // EVS mode(1bit), AMR-WB IO mode is 1
            uint32_t toc_ft_m = 1;
            // Q bit(1bit) - 1 for AMR_WB_IO
            // for ORG EVS to avoid the issue -#EURAVOLTE-567
            uint32_t toc_ft_q = 1;
            // EVS AMR WB IO bit rate(4bits)
            uint32_t toc_ft_b = (uint32_t)ImsMediaAudioUtil::ConvertLenToAmrWbMode(nDataSize);
            uint32_t nDataBitSize = ImsMediaAudioUtil::ConvertAmrWbModeToBitLen(toc_ft_b);

            // write the CMR and seek to the position of the first payload
            if (mCurrNumOfFrame == 1)
            {
                // write CMR byte
                mBWHeader.Write(cmr_h, 1);
                mBWHeader.Write(cmr_t, 3);
                mBWHeader.Write(cmr_d, 4);

                // seek to the position of the first payload:
                // speech data is added after the CMR and ToC bytes
                mBWPayload.Seek(8 + mMaxNumOfFrame * 8);

                mTimestamp = nTimeStamp; // set timestamp as the first frame
            }

            // write ToC
            mBWHeader.Write(toc_h, 1);
            mBWHeader.Write(toc_f, 1);
            mBWHeader.Write(toc_ft_m, 1);
            mBWHeader.Write(toc_ft_q, 1);
            mBWHeader.Write(toc_ft_b, 4);

            // write Speech Frame
            mBWPayload.WriteByteBuffer(pData, nDataBitSize);
            mBWPayload.AddPadding();

            mTotalPayloadSize += nDataSize;

            if (mCurrNumOfFrame == mMaxNumOfFrame)
            {
                // mBWHeader.AddPadding();
                mBWHeader.Flush();

                mBWPayload.AddPadding();
                mBWPayload.Flush();

                uint32_t nTotalSize = mBWPayload.GetBufferSize();

                IMLOGD_PACKET7(IM_PACKET_LOG_PH,
                        "[EncodePayloadEvs] result = %02X %02X %02X %02X %02X %02X, len[%d]",
                        mPayload[0], mPayload[1], mPayload[2], mPayload[3], mPayload[4],
                        mPayload[5], nTotalSize);

                if (mTotalPayloadSize > 0)
                {
                    SendDataToRearNode(MEDIASUBTYPE_RTPPAYLOAD, mPayload,
                            CheckPaddingNecessity(nTotalSize), mTimestamp, mFirstFrame, 0,
                            MEDIASUBTYPE_UNDEFINED, arrivalTime);
                }

                mCurrNumOfFrame = 0;
                mTotalPayloadSize = 0;
                if (mFirstFrame)
                    mFirstFrame = false;
            }
        }
        else
        {
            IMLOGE0("[EncodePayloadEvs] invalid codec mode");
            return;
        }
    }
    else
    {
        IMLOGE0("[EncodePayloadEvs] invalid payload format");
        return;
    }

    return;
}

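// A header-full payload whose total length happens to match one of the valid EVS
// compact-format sizes could be misread by the receiver as a compact payload, so zero
// padding bytes are appended until the size no longer maps to a compact format.
// Returns the possibly enlarged payload size.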
uint32_t AudioRtpPayloadEncoderNode::CheckPaddingNecessity(uint32_t nTotalSize)
{
    kEvsCodecMode evsCodecMode;
    uint32_t nEVSCompactId;
    uint32_t nSize = nTotalSize;

    // check EVS compact size
    while (nSize != 0 &&
            ImsMediaAudioUtil::ConvertEVSPayloadMode(nSize, &evsCodecMode, &nEVSCompactId) ==
                    kRtpPayloadHeaderModeEvsCompact)
    {
        mPayload[nSize] = 0;
        nSize++;
    }

    return nSize;
}