/**
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <RtpEncoderNode.h>
#include <ImsMediaTimer.h>
#include <ImsMediaTrace.h>
#include <ImsMediaVideoUtil.h>
#include <AudioConfig.h>
#include <VideoConfig.h>
#include <TextConfig.h>
#include <string.h>

RtpEncoderNode::RtpEncoderNode(BaseSessionCallback* callback) :
        BaseNode(callback)
{
    mRtpSession = nullptr;
    mDTMFMode = false;
    mMark = false;
    mPrevTimestamp = 0;
    mSamplingRate = 0;
    mRtpPayloadTx = 0;
    mRtpPayloadRx = 0;
    mRtpTxDtmfPayload = 0;
    mRtpRxDtmfPayload = 0;
    mDtmfSamplingRate = 0;
    mDtmfTimestamp = 0;
    mCvoValue = CVO_DEFINE_NONE;
    mRedundantLevel = 0;
    mRedundantPayload = 0;
}

RtpEncoderNode::~RtpEncoderNode()
{
    // Release the IRtpSession here so that other nodes sharing this instance are not left
    // unable to use it.
    if (mRtpSession)
    {
        mRtpSession->StopRtp();
        mRtpSession->SetRtpEncoderListener(nullptr);
        IRtpSession::ReleaseInstance(mRtpSession);
        mRtpSession = nullptr;
    }
}

kBaseNodeId RtpEncoderNode::GetNodeId()
{
    return kNodeIdRtpEncoder;
}

ImsMediaResult RtpEncoderNode::Start()
{
    IMLOGD1("[Start] type[%d]", mMediaType);
    bool bResetSsrc = false;

    if (mRtpPayloadTx == 0 || mRtpPayloadRx == 0)
    {
        IMLOGE0("[Start] invalid payload number");
        return RESULT_INVALID_PARAM;
    }

    if (mRtpSession == nullptr)
    {
        mRtpSession = IRtpSession::GetInstance(mMediaType, mLocalAddress, mPeerAddress);

        if (mRtpSession == nullptr)
        {
            IMLOGE0("[Start] Can't create rtp session");
            return RESULT_NOT_READY;
        }
    }

    mRtpSession->SetRtpEncoderListener(this);

    if (mMediaType == IMS_MEDIA_AUDIO)
    {
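        // mSamplingRate and mDtmfSamplingRate are held in kHz (see SetConfig); the * 1000
        // presumably scales them to Hz for the RTP stack.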
        mRtpSession->SetRtpPayloadParam(mRtpPayloadTx, mRtpPayloadRx, mSamplingRate * 1000,
                mRtpTxDtmfPayload, mRtpRxDtmfPayload, mDtmfSamplingRate * 1000);
    }
    else if (mMediaType == IMS_MEDIA_VIDEO)
    {
        mRtpSession->SetRtpPayloadParam(mRtpPayloadTx, mRtpPayloadRx, mSamplingRate * 1000);
    }
    else if (mMediaType == IMS_MEDIA_TEXT)
    {
        bResetSsrc = true;
        if (mRedundantPayload > 0)
        {
            mRtpSession->SetRtpPayloadParam(mRtpPayloadTx, mRtpPayloadRx, mSamplingRate * 1000,
                    mRedundantPayload, mSamplingRate * 1000);
        }
        else
        {
            mRtpSession->SetRtpPayloadParam(mRtpPayloadTx, mRtpPayloadRx, mSamplingRate * 1000);
        }

        if (mRtpContextParams.getSequenceNumber() > 0)
        {
            // Set the next sequence number to be used by the RTP stack.
            mRtpSession->SetRtpContext(mRtpContextParams.getSsrc(),
                    mRtpContextParams.getTimestamp(), mRtpContextParams.getSequenceNumber() + 1);
        }
    }

    mRtpSession->StartRtp(bResetSsrc);
    mDTMFMode = false;
    mMark = true;
    mPrevTimestamp = 0;
#ifdef DEBUG_JITTER_GEN_SIMULATION_DELAY
    mNextTime = 0;
#endif
#ifdef DEBUG_JITTER_GEN_SIMULATION_REORDER
    jitterData.Clear();
    mReorderDataCount = 0;
#endif
    mNodeState = kNodeStateRunning;
    return RESULT_SUCCESS;
}

void RtpEncoderNode::Stop()
{
    IMLOGD1("[Stop] type[%d]", mMediaType);
    std::lock_guard<std::mutex> guard(mMutex);

    if (mRtpSession)
    {
        mRtpSession->StopRtp();
    }

    mRtpContextParams.setDefaultConfig();

    ClearDataQueue();
    mNodeState = kNodeStateStopped;
}

void RtpEncoderNode::ProcessData()
{
    std::lock_guard<std::mutex> guard(mMutex);

    if (mNodeState != kNodeStateRunning)
    {
        return;
    }

    ImsMediaSubType subtype;
    uint8_t* data = nullptr;
    uint32_t size = 0;
    uint32_t timestamp = 0;
    bool mark = false;
    uint32_t seq = 0;
    ImsMediaSubType datatype;
    uint32_t arrivalTime = 0;

    if (GetData(&subtype, &data, &size, &timestamp, &mark, &seq, &datatype, &arrivalTime))
    {
        if (mMediaType == IMS_MEDIA_AUDIO)
        {
            if (!ProcessAudioData(subtype, data, size))
            {
                return;
            }
        }
        else if (mMediaType == IMS_MEDIA_VIDEO)
        {
            ProcessVideoData(subtype, data, size, timestamp, mark);
        }
        else if (mMediaType == IMS_MEDIA_TEXT)
        {
            ProcessTextData(subtype, data, size, timestamp, mark);
        }

        DeleteData();
    }
}

bool RtpEncoderNode::IsRunTime()
{
    return false;
}

bool RtpEncoderNode::IsSourceNode()
{
    return false;
}

void RtpEncoderNode::SetConfig(void* config)
{
    IMLOGD1("[SetConfig] media[%d]", mMediaType);

    if (config == nullptr)
    {
        return;
    }

    if (mMediaType == IMS_MEDIA_AUDIO)
    {
        AudioConfig* pConfig = reinterpret_cast<AudioConfig*>(config);
        mPeerAddress = RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort());
        mSamplingRate = pConfig->getSamplingRateKHz();
        mRtpPayloadTx = pConfig->getTxPayloadTypeNumber();
        mRtpPayloadRx = pConfig->getRxPayloadTypeNumber();
        mRtpTxDtmfPayload = pConfig->getTxDtmfPayloadTypeNumber();
        mRtpRxDtmfPayload = pConfig->getRxDtmfPayloadTypeNumber();
        mDtmfSamplingRate = pConfig->getDtmfsamplingRateKHz();
    }
    else if (mMediaType == IMS_MEDIA_VIDEO)
    {
        VideoConfig* pConfig = reinterpret_cast<VideoConfig*>(config);
        mPeerAddress = RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort());
        mSamplingRate = pConfig->getSamplingRateKHz();
        mRtpPayloadTx = pConfig->getTxPayloadTypeNumber();
        mRtpPayloadRx = pConfig->getRxPayloadTypeNumber();
        mCvoValue = pConfig->getCvoValue();
    }
    else if (mMediaType == IMS_MEDIA_TEXT)
    {
        TextConfig* pConfig = reinterpret_cast<TextConfig*>(config);
        mPeerAddress = RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort());
        mSamplingRate = pConfig->getSamplingRateKHz();
        mRtpPayloadTx = pConfig->getTxPayloadTypeNumber();
        mRtpPayloadRx = pConfig->getRxPayloadTypeNumber();
        mRedundantPayload = pConfig->getRedundantPayload();
        mRedundantLevel = pConfig->getRedundantLevel();

        RtpContextParams rtpContextParams = pConfig->getRtpContextParams();

        // TODO: #include <aidl/android/hardware/radio/AccessNetwork.h>
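        // 5 corresponds to AccessNetwork::IWLAN in the radio HAL AIDL referenced in the TODO
        // above.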
        if (pConfig->getAccessNetwork() == 5 && rtpContextParams.getSequenceNumber() > 0)
        {
            SetRtpContext(rtpContextParams);
        }
    }

    IMLOGD2("[SetConfig] peer Ip[%s], port[%d]", mPeerAddress.ipAddress, mPeerAddress.port);
}

bool RtpEncoderNode::IsSameConfig(void* config)
{
    if (config == nullptr)
    {
        return true;
    }

    if (mMediaType == IMS_MEDIA_AUDIO)
    {
        AudioConfig* pConfig = reinterpret_cast<AudioConfig*>(config);
        return (mPeerAddress ==
                        RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort()) &&
                mSamplingRate == pConfig->getSamplingRateKHz() &&
                mRtpPayloadTx == pConfig->getTxPayloadTypeNumber() &&
                mRtpPayloadRx == pConfig->getRxPayloadTypeNumber() &&
                mRtpTxDtmfPayload == pConfig->getTxDtmfPayloadTypeNumber() &&
                mRtpRxDtmfPayload == pConfig->getRxDtmfPayloadTypeNumber() &&
                mDtmfSamplingRate == pConfig->getDtmfsamplingRateKHz());
    }
    else if (mMediaType == IMS_MEDIA_VIDEO)
    {
        VideoConfig* pConfig = reinterpret_cast<VideoConfig*>(config);
        return (mPeerAddress ==
                        RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort()) &&
                mSamplingRate == pConfig->getSamplingRateKHz() &&
                mRtpPayloadTx == pConfig->getTxPayloadTypeNumber() &&
                mRtpPayloadRx == pConfig->getRxPayloadTypeNumber() &&
                mCvoValue == pConfig->getCvoValue());
    }
    else if (mMediaType == IMS_MEDIA_TEXT)
    {
        TextConfig* pConfig = reinterpret_cast<TextConfig*>(config);
        return (mPeerAddress ==
                        RtpAddress(pConfig->getRemoteAddress().c_str(), pConfig->getRemotePort()) &&
                mRtpContextParams == pConfig->getRtpContextParams() &&
                mSamplingRate == pConfig->getSamplingRateKHz() &&
                mRtpPayloadTx == pConfig->getTxPayloadTypeNumber() &&
                mRtpPayloadRx == pConfig->getRxPayloadTypeNumber() &&
                mRedundantPayload == pConfig->getRedundantPayload() &&
                mRedundantLevel == pConfig->getRedundantLevel());
    }

    return false;
}

void RtpEncoderNode::OnRtpPacket(unsigned char* data, uint32_t nSize)
{
    SendDataToRearNode(MEDIASUBTYPE_RTPPACKET, data, nSize, 0, 0, 0);
}

void RtpEncoderNode::SetLocalAddress(const RtpAddress& address)
{
    mLocalAddress = address;
}

void RtpEncoderNode::SetPeerAddress(const RtpAddress& address)
{
    mPeerAddress = address;
}

bool RtpEncoderNode::SetCvoExtension(const int64_t facing, const int64_t orientation)
{
    IMLOGD3("[SetCvoExtension] cvoValue[%d], facing[%ld], orientation[%ld]", mCvoValue, facing,
            orientation);

    if (mCvoValue > 0)
    {
        uint32_t rotation = 0;
        uint32_t cameraId = 0;

        if (facing == kCameraFacingRear)
        {
            cameraId = 1;
        }

        switch (orientation)
        {
            default:
            case 0:
                rotation = 0;
                break;
            case 270:
                rotation = 1;
                break;
            case 180:
                rotation = 2;
                break;
            case 90:
                rotation = 3;
                break;
        }

        if (cameraId == 1)  // rear camera
        {
            if (rotation == 1)  // CCW90
            {
                rotation = 3;
            }
            else if (rotation == 3)  // CCW270
            {
                rotation = 1;
            }
        }

        int8_t extensionData[4];  // 32bit
        IMLOGD3("[SetCvoExtension] cvoValue[%d], facing[%d], orientation[%d]", mCvoValue, cameraId,
                rotation);
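        // Pack a one-byte-header extension element (RFC 8285): the negotiated CVO identifier
        // sits in the high nibble of the first byte and the length field in the low nibble.
        // The second byte follows the 3GPP TS 26.114 CVO layout, carrying the camera
        // indication in bit 3 and the rotation in the two least significant bits.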

        extensionData[0] = (mCvoValue << 4) | 1;  // local identifier and data length
        extensionData[1] = (cameraId << 3) | rotation;
        extensionData[2] = 0;  // padding
        extensionData[3] = 0;  // padding

        mListRtpExtension.clear();
        mListRtpExtension.push_back(RtpHeaderExtensionInfo(
                RtpHeaderExtensionInfo::kBitPatternForOneByteHeader, 1, extensionData, 4));
        return true;
    }

    return false;
}

void RtpEncoderNode::SetRtpHeaderExtension(std::list<RtpHeaderExtension>* listExtension)
{
    if (listExtension == nullptr || listExtension->empty())
    {
        return;
    }

    /**
     * Determine which header form to use. Per RFC 8285 section 4.2, the one-byte header form
     * supports local identifiers in the range 1 to 14, while the two-byte header form supports
     * identifiers in the range 1 to 255.
     */
    bool useTwoByteHeader = false;
    int32_t totalPayloadLength = 0;  // accumulated extension data length, excluding header bytes

    for (auto extension : *listExtension)
    {
        // Identifiers above 14 require the two-byte form; 15 is reserved in the one-byte form.
        if (extension.getLocalIdentifier() > 14)
        {
            useTwoByteHeader = true;
        }

        totalPayloadLength += extension.getExtensionDataSize();
    }

    // accumulate header size
    totalPayloadLength += (useTwoByteHeader ? 2 : 1) * listExtension->size();

    // padding size
    int32_t paddingSize = totalPayloadLength % IMS_MEDIA_WORD_SIZE == 0
            ? 0
            : IMS_MEDIA_WORD_SIZE - totalPayloadLength % IMS_MEDIA_WORD_SIZE;
    totalPayloadLength += paddingSize;
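    // Illustration (hypothetical numbers): three one-byte-header extensions carrying 1, 2 and 3
    // data bytes need 3 header bytes + 6 data bytes = 9, so 3 bytes of padding round the block
    // up to the next 32-bit word boundary (12 bytes).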

    int8_t* extensionData = new int8_t[totalPayloadLength];
    int offset = 0;

    for (auto extension : *listExtension)
    {
        if (useTwoByteHeader)
        {
            extensionData[offset++] = extension.getLocalIdentifier();
            extensionData[offset++] = extension.getExtensionDataSize();
        }
        else
        {
            extensionData[offset++] =
                    extension.getLocalIdentifier() << 4 | (extension.getExtensionDataSize() - 1);
        }

        memcpy(extensionData + offset, extension.getExtensionData(),
                extension.getExtensionDataSize());
        offset += extension.getExtensionDataSize();
    }

    // add padding
    memset(extensionData + offset, 0, paddingSize);

    IMLOGD3("[SetRtpHeaderExtension] twoByte[%d], size[%d], list size[%d]", useTwoByteHeader,
            totalPayloadLength, listExtension->size());

    int16_t defineByProfile = useTwoByteHeader
            ? RtpHeaderExtensionInfo::kBitPatternForTwoByteHeader
            : RtpHeaderExtensionInfo::kBitPatternForOneByteHeader;
    mListRtpExtension.push_back(RtpHeaderExtensionInfo(
            defineByProfile, totalPayloadLength / 4, extensionData, totalPayloadLength));

    delete[] extensionData;
}

bool RtpEncoderNode::ProcessAudioData(ImsMediaSubType subtype, uint8_t* data, uint32_t size)
{
    uint32_t currentTimestamp;
    uint32_t timeDiff;
    uint32_t timestampDiff;

    if (subtype == MEDIASUBTYPE_DTMFSTART)
    {
        IMLOGD0("[ProcessAudioData] SetDTMF mode true");
        mDTMFMode = true;
        mMark = true;
    }
    else if (subtype == MEDIASUBTYPE_DTMFEND)
    {
        IMLOGD0("[ProcessAudioData] SetDTMF mode false");
        mDTMFMode = false;
        mMark = true;
    }
    else if (subtype == MEDIASUBTYPE_DTMF_PAYLOAD)
    {
        if (mDTMFMode)
        {
            currentTimestamp = ImsMediaTimer::GetTimeInMilliSeconds();
            timeDiff = currentTimestamp - mPrevTimestamp;

            if (timeDiff < 20)
            {
                return false;
            }

            if (mMark)
            {
                mDtmfTimestamp = currentTimestamp;
            }
            else
            {
                timeDiff = 0;
            }

            mPrevTimestamp = currentTimestamp;
            timestampDiff = timeDiff * mSamplingRate;

            IMLOGD_PACKET3(IM_PACKET_LOG_RTP,
                    "[ProcessAudioData] dtmf payload, size[%u], TS[%u], diff[%d]", size,
                    mDtmfTimestamp, timestampDiff);
            mRtpSession->SendRtpPacket(
                    mRtpTxDtmfPayload, data, size, mDtmfTimestamp, mMark, timestampDiff);
            mMark = false;
        }
    }
    else  // MEDIASUBTYPE_RTPPAYLOAD
    {
        if (mDTMFMode == false)
        {
            currentTimestamp = ImsMediaTimer::GetTimeInMilliSeconds();

            if (mPrevTimestamp == 0)
            {
                timeDiff = 0;
                mPrevTimestamp = currentTimestamp;
            }
            else
            {
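                // Quantize the elapsed wall-clock time to the 20 ms audio frame interval
                // (assumption: 20 ms frames, as used by AMR/EVS); the +5 ms bias lets
                // intervals just short of a frame boundary round up to it.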
                timeDiff = ((currentTimestamp - mPrevTimestamp) + 5) / 20 * 20;

                if (timeDiff > 20)
                {
                    mPrevTimestamp = currentTimestamp;
                }
                else if (timeDiff == 0)
                {
                    return false;
                }
                else
                {
                    mPrevTimestamp += timeDiff;
                }
            }

            RtpPacket* packet = new RtpPacket();
            packet->rtpDataType = kRtpDataTypeNormal;
            mCallback->SendEvent(
                    kCollectPacketInfo, kStreamRtpTx, reinterpret_cast<uint64_t>(packet));

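            // mSamplingRate is in kHz, so elapsed milliseconds * kHz yields the RTP timestamp
            // increment in samples.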
            timestampDiff = timeDiff * mSamplingRate;
            IMLOGD_PACKET3(IM_PACKET_LOG_RTP, "[ProcessAudioData] size[%u], TS[%u], diff[%d]", size,
                    currentTimestamp, timestampDiff);

            if (!mListRtpExtension.empty())
            {
                mRtpSession->SendRtpPacket(mRtpPayloadTx, data, size, currentTimestamp, mMark,
                        timestampDiff, &mListRtpExtension.front());
                mListRtpExtension.pop_front();
            }
            else
            {
                mRtpSession->SendRtpPacket(
                        mRtpPayloadTx, data, size, currentTimestamp, mMark, timestampDiff);
            }

            if (mMark)
            {
                mMark = false;
            }
        }
    }

    return true;
}

void RtpEncoderNode::ProcessVideoData(
        ImsMediaSubType subtype, uint8_t* data, uint32_t size, uint32_t timestamp, bool mark)
{
    IMLOGD_PACKET4(IM_PACKET_LOG_RTP, "[ProcessVideoData] subtype[%d], size[%d], TS[%u], mark[%d]",
            subtype, size, timestamp, mark);

#ifdef SIMULATE_VIDEO_CVO_UPDATE
    const int64_t kCameraFacing = kCameraFacingFront;
    static int64_t sDeviceOrientation = 0;
    static int64_t sCount = 0;
    if ((++sCount % 100) == 0)
    {
        SetCvoExtension(kCameraFacing, (sDeviceOrientation += 90) % 360);
    }
#endif

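    // Send the queued CVO header extension only when CVO has been negotiated (mCvoValue > 0)
    // and this unit is a marked IDR frame; all other packets go out without an extension.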
    if (mCvoValue > 0 && mark && subtype == MEDIASUBTYPE_VIDEO_IDR_FRAME)
    {
        mRtpSession->SendRtpPacket(mRtpPayloadTx, data, size, timestamp, mark, 0,
                mListRtpExtension.empty() ? nullptr : &mListRtpExtension.front());
    }
    else
    {
        mRtpSession->SendRtpPacket(mRtpPayloadTx, data, size, timestamp, mark, 0);
    }
}

void RtpEncoderNode::ProcessTextData(
        ImsMediaSubType subtype, uint8_t* data, uint32_t size, uint32_t timestamp, bool mark)
{
    IMLOGD_PACKET4(IM_PACKET_LOG_RTP,
            "[ProcessTextData] subtype[%d], size[%d], timestamp[%d], mark[%d]", subtype, size,
            timestamp, mark);

    uint32_t timeDiff;

    if (mMark == true)
    {
        timeDiff = 0;
    }
    else
    {
        timeDiff = timestamp - mPrevTimestamp;
    }

    if (subtype == MEDIASUBTYPE_BITSTREAM_T140)
    {
        if (mRedundantLevel > 1 && mRedundantPayload > 0)
        {
            mRtpSession->SendRtpPacket(mRedundantPayload, data, size, timestamp, mark, timeDiff);
        }
        else
        {
            mRtpSession->SendRtpPacket(mRtpPayloadRx, data, size, timestamp, mark, timeDiff);
        }
    }
    else if (subtype == MEDIASUBTYPE_BITSTREAM_T140_RED)
    {
        mRtpSession->SendRtpPacket(mRtpPayloadTx, data, size, timestamp, mark, timeDiff);
    }

    mMark = false;
    mPrevTimestamp = timestamp;
}

void RtpEncoderNode::SetRtpContext(RtpContextParams& rtpContextParams)
{
    mRtpContextParams = rtpContextParams;
}

void RtpEncoderNode::GetRtpContext(RtpContextParams& rtpContextParams)
{
    uint32_t ssrc = 0;
    uint32_t timestamp = 0;
    uint16_t seqNumber = 0;

    if (mRtpSession != nullptr)
    {
        mRtpSession->GetRtpContext(ssrc, timestamp, seqNumber);
    }

    rtpContextParams.setSsrc(ssrc);
    rtpContextParams.setTimestamp(timestamp);
    rtpContextParams.setSequenceNumber(seqNumber);
}