1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 /**
17 ******************************************************************************
18 * @file M4VSS3GPP_EditVideo.c
19 * @brief Video Studio Service 3GPP edit API implementation.
20 * @note
21 ******************************************************************************
22 */
23 #undef M4OSA_TRACE_LEVEL
24 #define M4OSA_TRACE_LEVEL 1
25
26 /****************/
27 /*** Includes ***/
28 /****************/
29
30 #include "NXPSW_CompilerSwitches.h"
31 /**
32 * Our header */
33 #include "M4VSS3GPP_API.h"
34 #include "M4VSS3GPP_InternalTypes.h"
35 #include "M4VSS3GPP_InternalFunctions.h"
36 #include "M4VSS3GPP_InternalConfig.h"
37 #include "M4VSS3GPP_ErrorCodes.h"
38
39 // StageFright encoders require resolutions that are multiples of 16
40 #include "M4ENCODER_common.h"
41 /**
42 * OSAL headers */
43 #include "M4OSA_Memory.h" /**< OSAL memory management */
44 #include "M4OSA_Debug.h" /**< OSAL debug management */
45
46 /**
47 * component includes */
48 #include "M4VFL_transition.h" /**< video effects */
49
50 /*for transition behaviour*/
51 #include <math.h>
52 #include "M4AIR_API.h"
53 #include "M4VSS3GPP_Extended_API.h"
54 /** Determine absolute value of a. */
55 #define M4xVSS_ABS(a) ( ( (a) < (0) ) ? (-(a)) : (a) )
56 #define Y_PLANE_BORDER_VALUE 0x00
57 #define U_PLANE_BORDER_VALUE 0x80
58 #define V_PLANE_BORDER_VALUE 0x80
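/* Border fill values for YUV420 planes: 0x00 is black luma, 0x80 is the neutral
   (grey) chroma value. */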
59
60 /************************************************************************/
61 /* Static local functions */
62 /************************************************************************/
63
64 static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
65 M4VSS3GPP_InternalEditContext *pC );
66 static M4OSA_Void
67 M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
68 M4OSA_UInt8 uiClipNumber );
69 static M4OSA_ERR M4VSS3GPP_intApplyVideoEffect(
70 M4VSS3GPP_InternalEditContext *pC, M4VIFI_ImagePlane *pPlaneIn,
71 M4VIFI_ImagePlane *pPlaneOut, M4OSA_Bool bSkipFramingEffect);
72
73 static M4OSA_ERR
74 M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
75 M4VIFI_ImagePlane *pPlaneOut );
76
77 static M4OSA_Void
78 M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
79 M4SYS_AccessUnit *pAU );
80 static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
81 M4OSA_UInt8 uiCts );
82 static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
83 M4OSA_UInt32 uiCtsSec );
84 static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
85 M4OSA_UInt32 *pCtsSec );
86 static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
87 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight );
88 static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(
89 M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr,
90 M4VIFI_ImagePlane* pImagePlanes,
91 M4OSA_UInt32 width,M4OSA_UInt32 height);
92 static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(
93 M4VSS3GPP_InternalEditContext *pC,
94 M4xVSS_MediaRendering renderingMode,
95 M4VIFI_ImagePlane* pInplane,
96 M4VIFI_ImagePlane* pOutplane);
97
98 static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 (
99 M4VSS3GPP_InternalEditContext *pC,
100 M4VSS3GPP_ClipContext* pClipCtxt);
101 static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(
102 M4VSS3GPP_InternalEditContext *pC,
103 M4VSS3GPP_ClipContext* pClipCtxt,
104 M4_MediaTime ts,
105 M4OSA_Bool bIsClip1,
106 M4VIFI_ImagePlane *pResizePlane,
107 M4VIFI_ImagePlane *pPlaneNoResize,
108 M4VIFI_ImagePlane *pPlaneOut);
109
110 static M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
111 M4OSA_UInt32 rotationDegree);
112
113 static M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
114 M4OSA_UInt32 width, M4OSA_UInt32 height);
115
116 static M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay (
117 M4VSS3GPP_InternalEditContext *pC,
118 M4VIFI_ImagePlane *pPlaneIn,
119 M4VIFI_ImagePlane *pPlaneOut);
120
121 /**
122 ******************************************************************************
123 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
124 * @brief One step of video processing
125 * @param pC (IN/OUT) Internal edit context
126 ******************************************************************************
127 */
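/*
 * In short, pC->Vstate selects the processing mode handled below: READ_WRITE
 * copies compressed AUs straight from the reader to the writer, DECODE_ENCODE
 * re-encodes the decoded frame (effects, ARGB clips, transcoding), BEGIN_CUT
 * forces an I-frame at a cut point, AFTER_CUT keeps time frozen right after a
 * cut, and TRANSITION blends frames from two clips before re-encoding.
 */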
128 M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
129 {
130 M4OSA_ERR err;
131 M4OSA_Int32 iCts, iNextCts;
132 M4ENCODER_FrameMode FrameMode;
133 M4OSA_Bool bSkipFrame;
134 M4OSA_UInt16 offset;
135
136 /**
137 * Check if we reached end cut. Decorrelate input and output encoding
138 * timestamp to handle encoder prefetch
139 */
140 if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
141 + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
142 {
143 /* Re-adjust video to precise cut time */
144 pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
145 - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
146 if ( pC->iInOutTimeOffset < 0 ) {
147 pC->iInOutTimeOffset = 0;
148 }
149
150 /**
151 * Video is done for this clip */
152 err = M4VSS3GPP_intReachedEndOfVideo(pC);
153
154 /* RC: to know when a file has been processed */
155 if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
156 {
157 M4OSA_TRACE1_1(
158 "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
159 err);
160 }
161
162 return err;
163 }
164
165 /* Don't change the states if we are in decodeUpTo() */
166 if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
167 && (( pC->pC2 == M4OSA_NULL)
168 || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
169 {
170 /**
171 * Check Video Mode, depending on the current output CTS */
172 err = M4VSS3GPP_intCheckVideoMode(
173 pC); /**< This function changes the pC->Vstate variable! */
174
175 if (M4NO_ERROR != err)
176 {
177 M4OSA_TRACE1_1(
178 "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
179 err);
180 return err;
181 }
182 }
183
184
185 switch( pC->Vstate )
186 {
187 /* _________________ */
188 /*| |*/
189 /*| READ_WRITE MODE |*/
190 /*|_________________|*/
191
192 case M4VSS3GPP_kEditVideoState_READ_WRITE:
193 case M4VSS3GPP_kEditVideoState_AFTER_CUT:
194 {
195 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");
196
197 bSkipFrame = M4OSA_FALSE;
198
199 /**
200 * If we were decoding the clip, we must jump to make sure
201 * we get to the right position. */
202 if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
203 {
204 /**
205 * Jump to target video time (tc = to-T) */
206 // Decorrelate input and output encoding timestamp to handle encoder prefetch
207 iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
208 err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
209 pC->pC1->pReaderContext,
210 (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);
211
212 if( M4NO_ERROR != err )
213 {
214 M4OSA_TRACE1_1(
215 "M4VSS3GPP_intEditStepVideo:\
216 READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
217 err);
218 return err;
219 }
220
221 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
222 pC->pC1->pReaderContext,
223 (M4_StreamHandler *)pC->pC1->pVideoStream,
224 &pC->pC1->VideoAU);
225
226 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
227 {
228 M4OSA_TRACE1_1(
229 "M4VSS3GPP_intEditStepVideo:\
230 READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
231 err);
232 return err;
233 }
234
235 M4OSA_TRACE2_3("A .... read : cts = %.0f + %ld [ 0x%x ]",
236 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
237 pC->pC1->VideoAU.m_size);
238
239 /* This frame has been already written in BEGIN CUT step -> skip it */
240 if( pC->pC1->VideoAU.m_CTS == iCts
241 && pC->pC1->iVideoRenderCts >= iCts )
242 {
243 bSkipFrame = M4OSA_TRUE;
244 }
245 }
246
247 /* This frame has been already written in BEGIN CUT step -> skip it */
248 if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
249 && (pC->pC1->VideoAU.m_CTS
250 + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
251 {
252 bSkipFrame = M4OSA_TRUE;
253 }
254
255 /**
256 * Remember the clip reading state */
257 pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
258 // Decorrelate input and output encoding timestamp to handle encoder prefetch
259 // Rounding is to compensate reader imprecision (m_CTS is actually an integer)
260 iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
261 iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;
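/* Lowering iCts by 1 ms (and compensating in iNextCts) keeps a frame whose
   integer m_CTS was rounded down from being missed by the [iCts, iNextCts) window. */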
262 /* Avoid writing a last frame of duration 0 */
263 if( iNextCts > pC->pC1->iEndTime )
264 iNextCts = pC->pC1->iEndTime;
265
266 /**
267 * If the AU is good to be written, write it, else just skip it */
268 if( ( M4OSA_FALSE == bSkipFrame)
269 && (( pC->pC1->VideoAU.m_CTS >= iCts)
270 && (pC->pC1->VideoAU.m_CTS < iNextCts)
271 && (pC->pC1->VideoAU.m_size > 0)) )
272 {
273 /**
274 * Get the output AU to write into */
275 err = pC->ShellAPI.pWriterDataFcts->pStartAU(
276 pC->ewc.p3gpWriterContext,
277 M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
278 &pC->ewc.WriterVideoAU);
279
280 if( M4NO_ERROR != err )
281 {
282 M4OSA_TRACE1_1(
283 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
284 pWriterDataFcts->pStartAU(Video) returns 0x%x!",
285 err);
286 return err;
287 }
288
289 /**
290 * Copy the input AU to the output AU */
291 pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
292 // Decorrelate input and output encoding timestamp to handle encoder prefetch
293 pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
294 (M4OSA_Time)pC->pC1->iVoffset;
295 pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
296 offset = 0;
297 /* for an H.264 stream, do not read the 1st 4 bytes as they are header
298 indicators */
299 if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
300 == M4DA_StreamTypeVideoMpeg4Avc )
301 offset = 4;
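/* (These 4 bytes are most likely the NAL-unit length field that prefixes each AVC
   sample in 3GP/MP4 files; the writer is assumed to add its own prefix back.) */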
302
303 pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
304 if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
305 {
306 M4OSA_TRACE1_2(
307 "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
308 MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
309 pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
310 return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
311 }
312
313 memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
314 (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
315 (pC->ewc.WriterVideoAU.size));
316
317 /**
318 * Update time info for the Counter Time System to be equal to the bit
319 -stream time*/
320 M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
321 M4OSA_TRACE2_2("B ---- write : cts = %lu [ 0x%x ]",
322 pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);
323
324 /**
325 * Write the AU */
326 err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
327 pC->ewc.p3gpWriterContext,
328 M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
329 &pC->ewc.WriterVideoAU);
330
331 if( M4NO_ERROR != err )
332 {
333 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
334 file size is reached
335 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
336 is returned*/
337 if( M4WAR_WRITER_STOP_REQ == err )
338 {
339 M4OSA_TRACE1_0(
340 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
341 return M4VSS3GPP_WAR_EDITING_DONE;
342 }
343 else
344 {
345 M4OSA_TRACE1_1(
346 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
347 pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
348 err);
349 return err;
350 }
351 }
352
353 /**
354 * Read next AU for next step */
355 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
356 pC->pC1->pReaderContext,
357 (M4_StreamHandler *)pC->pC1->pVideoStream,
358 &pC->pC1->VideoAU);
359
360 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
361 {
362 M4OSA_TRACE1_1(
363 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
364 m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
365 err);
366 return err;
367 }
368
369 M4OSA_TRACE2_3("C .... read : cts = %.0f + %ld [ 0x%x ]",
370 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
371 pC->pC1->VideoAU.m_size);
372 }
373 else
374 {
375 /**
376 * Decide whether to read the next AU or to just increment the time */
377 if( ( pC->pC1->VideoAU.m_size == 0)
378 || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
379 {
380 /* Increment time by the encoding period (NO_MORE_AU or reader in advance) */
381 // Decorrelate input and output encoding timestamp to handle encoder prefetch
382 pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
383
384 /* Switch (from AFTER_CUT) to normal mode because time is
385 no more frozen */
386 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
387 }
388 else
389 {
390 /* In other cases (reader late), just let the reader catch up to
391 pC->ewc.dVTo */
392 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
393 pC->pC1->pReaderContext,
394 (M4_StreamHandler *)pC->pC1->pVideoStream,
395 &pC->pC1->VideoAU);
396
397 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
398 {
399 M4OSA_TRACE1_1(
400 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
401 m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
402 err);
403 return err;
404 }
405
406 M4OSA_TRACE2_3("D .... read : cts = %.0f + %ld [ 0x%x ]",
407 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
408 pC->pC1->VideoAU.m_size);
409 }
410 }
411 }
412 break;
413
414 /* ____________________ */
415 /*| |*/
416 /*| DECODE_ENCODE MODE |*/
417 /*| BEGIN_CUT MODE |*/
418 /*|____________________|*/
419
420 case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
421 case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
422 {
423 M4OSA_TRACE3_0(
424 "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");
425
426 if ((pC->pC1->pSettings->FileType ==
427 M4VIDEOEDITING_kFileType_ARGB8888) &&
428 (M4OSA_FALSE ==
429 pC->pC1->pSettings->ClipProperties.bSetImageData)) {
430
431 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
432 if( M4NO_ERROR != err ) {
433 M4OSA_TRACE1_1(
434 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
435 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
436 return err;
437 }
438 }
439 /**
440 * Decode the video up to the target time
441 (will jump to the previous RAP if needed ) */
442 // Decorrelate input and output encoding timestamp to handle encoder prefetch
443 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
444 if( M4NO_ERROR != err )
445 {
446 M4OSA_TRACE1_1(
447 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
448 M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
449 err);
450 return err;
451 }
452
453 /* If the decoding is not completed, do one more step with time frozen */
454 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
455 {
456 return M4NO_ERROR;
457 }
458
459 /**
460 * Reset the video pre-processing error before calling the encoder */
461 pC->ewc.VppError = M4NO_ERROR;
462
463 M4OSA_TRACE2_0("E ++++ encode AU");
464
465 /**
466 * Encode the frame(rendering,filtering and writing will be done
467 in encoder callbacks)*/
468 if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
469 FrameMode = M4ENCODER_kIFrame;
470 else
471 FrameMode = M4ENCODER_kNormalFrame;
472
473 // Decorrelate input and output encoding timestamp to handle encoder prefetch
474 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
475 pC->ewc.dInputVidCts, FrameMode);
476 /**
477 * Check if we had a VPP error... */
478 if( M4NO_ERROR != pC->ewc.VppError )
479 {
480 M4OSA_TRACE1_1(
481 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
482 pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
483 pC->ewc.VppError);
484 #ifdef M4VSS_SUPPORT_OMX_CODECS
485
486 if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
487 {
488 #endif //M4VSS_SUPPORT_OMX_CODECS
489
490 return pC->ewc.VppError;
491 #ifdef M4VSS_SUPPORT_OMX_CODECS
492
493 }
494
495 #endif //M4VSS_SUPPORT_OMX_CODECS
496 }
497 else if( M4NO_ERROR != err ) /**< ...or an encoder error */
498 {
499 if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
500 {
501 M4OSA_TRACE1_0(
502 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
503 returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
504 return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
505 }
506 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
507 file size is reached
508 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
509 is returned*/
510 else if( M4WAR_WRITER_STOP_REQ == err )
511 {
512 M4OSA_TRACE1_0(
513 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
514 return M4VSS3GPP_WAR_EDITING_DONE;
515 }
516 else
517 {
518 M4OSA_TRACE1_1(
519 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
520 pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
521 err);
522 return err;
523 }
524 }
525
526 /**
527 * Increment time by the encoding period (for begin cut, do not increment so as not
528 to lose P-frames) */
529 if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
530 {
531 // Decorrelate input and output encoding timestamp to handle encoder prefetch
532 pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
533 }
534 }
535 break;
536
537 /* _________________ */
538 /*| |*/
539 /*| TRANSITION MODE |*/
540 /*|_________________|*/
541
542 case M4VSS3GPP_kEditVideoState_TRANSITION:
543 {
544 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");
545
546 /* Don't decode more than needed */
547 if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
548 && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
549 {
550 /**
551 * Decode the clip1 video up to the target time
552 (will jump to the previous RAP if needed) */
553 if ((pC->pC1->pSettings->FileType ==
554 M4VIDEOEDITING_kFileType_ARGB8888) &&
555 (M4OSA_FALSE ==
556 pC->pC1->pSettings->ClipProperties.bSetImageData)) {
557
558 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
559 if( M4NO_ERROR != err ) {
560 M4OSA_TRACE1_1(
561 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
562 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
563 return err;
564 }
565 }
566 // Decorrelate input and output encoding timestamp to handle encoder prefetch
567 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
568 (M4OSA_Int32)pC->ewc.dInputVidCts);
569 if( M4NO_ERROR != err )
570 {
571 M4OSA_TRACE1_1(
572 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
573 M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
574 err);
575 return err;
576 }
577
578 /* If the decoding is not completed, do one more step with time frozen */
579 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
580 {
581 return M4NO_ERROR;
582 }
583 }
584
585 /* Don't decode more than needed */
586 if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
587 && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
588 {
589 /**
590 * Decode the clip2 video up to the target time
591 (will jump to the previous RAP if needed) */
592 if ((pC->pC2->pSettings->FileType ==
593 M4VIDEOEDITING_kFileType_ARGB8888) &&
594 (M4OSA_FALSE ==
595 pC->pC2->pSettings->ClipProperties.bSetImageData)) {
596
597 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2);
598 if( M4NO_ERROR != err ) {
599 M4OSA_TRACE1_1(
600 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
601 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
602 return err;
603 }
604 }
605
606 // Decorrelate input and output encoding timestamp to handle encoder prefetch
607 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
608 (M4OSA_Int32)pC->ewc.dInputVidCts);
609 if( M4NO_ERROR != err )
610 {
611 M4OSA_TRACE1_1(
612 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
613 M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
614 err);
615 return err;
616 }
617
618 /* If the decoding is not completed, do one more step with time frozen */
619 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
620 {
621 return M4NO_ERROR;
622 }
623 }
624
625 /**
626 * Reset the video pre-processing error before calling the encoder */
627 pC->ewc.VppError = M4NO_ERROR;
628
629 M4OSA_TRACE2_0("F **** blend AUs");
630
631 /**
632 * Encode the frame (rendering, filtering and writing will be done
633 in encoder callbacks */
634 // Decorrelate input and output encoding timestamp to handle encoder prefetch
635 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
636 pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);
637
638 /**
639 * If encode returns a process frame error, it is likely to be a VPP error */
640 if( M4NO_ERROR != pC->ewc.VppError )
641 {
642 M4OSA_TRACE1_1(
643 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
644 pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
645 pC->ewc.VppError);
646 #ifdef M4VSS_SUPPORT_OMX_CODECS
647
648 if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
649 {
650
651 #endif //M4VSS_SUPPORT_OMX_CODECS
652
653 return pC->ewc.VppError;
654 #ifdef M4VSS_SUPPORT_OMX_CODECS
655
656 }
657
658 #endif //M4VSS_SUPPORT_OMX_CODECS
659 }
660 else if( M4NO_ERROR != err ) /**< ...or an encoder error */
661 {
662 if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
663 {
664 M4OSA_TRACE1_0(
665 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
666 returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
667 return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
668 }
669
670 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
671 file size is reached
672 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
673 returned*/
674 else if( M4WAR_WRITER_STOP_REQ == err )
675 {
676 M4OSA_TRACE1_0(
677 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
678 return M4VSS3GPP_WAR_EDITING_DONE;
679 }
680 else
681 {
682 M4OSA_TRACE1_1(
683 "M4VSS3GPP_intEditStepVideo: TRANSITION:\
684 pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
685 err);
686 return err;
687 }
688 }
689
690 /**
691 * Increment time by the encoding period */
692 // Decorrelate input and output encoding timestamp to handle encoder prefetch
693 pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
694 }
695 break;
696
697 /* ____________ */
698 /*| |*/
699 /*| ERROR CASE |*/
700 /*|____________|*/
701
702 default:
703 M4OSA_TRACE1_1(
704 "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
705 returning M4VSS3GPP_ERR_INTERNAL_STATE",
706 pC->Vstate);
707 return M4VSS3GPP_ERR_INTERNAL_STATE;
708 }
709
710 /**
711 * Return with no error */
712 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
713 return M4NO_ERROR;
714 }
715
716 /**
717 ******************************************************************************
718 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
719 * @brief Check which video process mode we must use, depending on the output CTS.
720 * @param pC (IN/OUT) Internal edit context
721 ******************************************************************************
722 */
723 static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
724 M4VSS3GPP_InternalEditContext *pC )
725 {
726 M4OSA_ERR err;
727 // Decorrelate input and output encoding timestamp to handle encoder prefetch
728 const M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;
729 /**< Transition duration */
730 const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
731
732 M4OSA_Int32 iTmp;
733
734 const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate;
735
736 /**
737 * Check if Clip1 is on its begin cut, or in an effect zone */
738 M4VSS3GPP_intCheckVideoEffects(pC, 1);
739
740 /**
741 * Check if we are in the transition with next clip */
742 if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) )
743 {
744 /**
745 * We are in a transition */
746 pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION;
747 pC->bTransitionEffect = M4OSA_TRUE;
748
749 /**
750 * Open second clip for transition, if not yet opened */
751 if( M4OSA_NULL == pC->pC2 )
752 {
753 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE;
754
755 err = M4VSS3GPP_intOpenClip(pC, &pC->pC2,
756 &pC->pClipList[pC->uiCurrentClip + 1]);
757
758 if( M4NO_ERROR != err )
759 {
760 M4OSA_TRACE1_1(
761 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!",
762 err);
763 return err;
764 }
765
766 /**
767 * Add current video output CTS to the clip offset
768 * (audio output CTS is not yet at the transition, so audio
769 * offset can't be updated yet). */
770 // Decorrelate input and output encoding timestamp to handle encoder prefetch
771 pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts;
772
773 /**
774 * 2005-03-24: BugFix for audio-video synchro:
775 * Update transition duration due to the actual video transition beginning time.
776 * It will avoid desynchronization when doing the audio transition. */
777 // Decorrelate input and output encoding timestamp to handle encoder prefetch
778 iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\
779 - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset);
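/* iTmp is how far the actual transition start lags the nominal one; the transition
   is shortened by that amount and the total output duration grows back by it. */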
780 if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration)
781 /**< Test in case of a very short transition */
782 {
783 pC->pTransitionList[pC->
784 uiCurrentClip].uiTransitionDuration -= iTmp;
785
786 /**
787 * Don't forget to also correct the total duration used for the progress bar
788 * (it was computed with the original transition duration). */
789 pC->ewc.iOutputDuration += iTmp;
790 }
791 /**< No "else" here because it's hard predict the effect of 0 duration transition...*/
792 }
793
794 /**
795 * Check effects for clip2 */
796 M4VSS3GPP_intCheckVideoEffects(pC, 2);
797 }
798 else
799 {
800 /**
801 * We are not in a transition */
802 pC->bTransitionEffect = M4OSA_FALSE;
803
804 /* If there is an effect we go to decode/encode mode */
805 if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) ||
806 (pC->pC1->pSettings->FileType ==
807 M4VIDEOEDITING_kFileType_ARGB8888) ||
808 (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) {
809 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
810 }
811 /* We do a begin cut, except if already done (time is not progressing because we want
812 to catch all P-frames after the cut) */
813 else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
814 {
815 if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
816 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
817 pC->bEncodeTillEoF = M4OSA_TRUE;
818 } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
819 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
820 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
821 } else {
822 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
823 }
824 }
825 /* Else we are in default copy/paste mode */
826 else
827 {
828 if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
829 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
830 {
831 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
832 }
833 else if( pC->bIsMMS == M4OSA_TRUE )
834 {
835 M4OSA_UInt32 currentBitrate;
836 M4OSA_ERR err = M4NO_ERROR;
837
838 /* Do we need to reencode the video to downgrade the bitrate or not ? */
839 /* Let's compute the current bitrate of the current edited clip */
840 err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption(
841 pC->pC1->pReaderContext,
842 M4READER_kOptionID_Bitrate, &currentBitrate);
843
844 if( err != M4NO_ERROR )
845 {
846 M4OSA_TRACE1_1(
847 "M4VSS3GPP_intCheckVideoMode:\
848 Error when getting next bitrate of edited clip: 0x%x",
849 err);
850 return err;
851 }
852
853 /* Remove audio bitrate */
854 currentBitrate -= 12200;
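/* (12200 bps presumably corresponds to the 12.2 kbps AMR-NB audio assumed for MMS
   output, leaving an estimate of the video-only bitrate.) */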
855
856 /* Test if we go into copy/paste mode or into decode/encode mode */
857 if( currentBitrate > pC->uiMMSVideoBitrate )
858 {
859 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
860 }
861 else
862 {
863 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
864 }
865 }
866 else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
867 (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
868 pC->bEncodeTillEoF == M4OSA_FALSE)
869 {
870 /**
871 * Test if we go into copy/paste mode or into decode/encode mode
872 * If an external effect has been applied on the current clip
873 * then continue to be in decode/encode mode till end of
874 * clip to avoid H.264 distortion.
875 */
876 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
877 }
878 }
879 }
880
881 /**
882 * Check if we create an encoder */
883 if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate)
884 || (M4VSS3GPP_kEditVideoState_AFTER_CUT
885 == previousVstate)) /**< read mode */
886 && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate)
887 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate)
888 || (M4VSS3GPP_kEditVideoState_TRANSITION
889 == pC->Vstate)) /**< encode mode */
890 && pC->bIsMMS == M4OSA_FALSE )
891 {
892 /**
893 * Create the encoder, if not created already*/
894 if (pC->ewc.encoderState == M4VSS3GPP_kNoEncoder) {
895 err = M4VSS3GPP_intCreateVideoEncoder(pC);
896
897 if( M4NO_ERROR != err )
898 {
899 M4OSA_TRACE1_1(
900 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder \
901 returns 0x%x!", err);
902 return err;
903 }
904 }
905 }
906 else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL )
907 {
908 /**
909 * Create the encoder */
910 err = M4VSS3GPP_intCreateVideoEncoder(pC);
911
912 if( M4NO_ERROR != err )
913 {
914 M4OSA_TRACE1_1(
915 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
916 err);
917 return err;
918 }
919 }
920
921 /**
922 * When we go from filtering to read/write, we must act like a begin cut,
923 * because the last filtered image may be different than the original image. */
924 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
925 || (M4VSS3GPP_kEditVideoState_TRANSITION
926 == previousVstate)) /**< encode mode */
927 && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */
928 && (pC->bEncodeTillEoF == M4OSA_FALSE) )
929 {
930 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
931 }
932
933 /**
934 * Check if we destroy an encoder */
935 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
936 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
937 || (M4VSS3GPP_kEditVideoState_TRANSITION
938 == previousVstate)) /**< encode mode */
939 && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate)
940 || (M4VSS3GPP_kEditVideoState_AFTER_CUT
941 == pC->Vstate)) /**< read mode */
942 && pC->bIsMMS == M4OSA_FALSE )
943 {
944 /**
945 * Destroy the previously created encoder */
946 err = M4VSS3GPP_intDestroyVideoEncoder(pC);
947
948 if( M4NO_ERROR != err )
949 {
950 M4OSA_TRACE1_1(
951 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!",
952 err);
953 return err;
954 }
955 }
956
957 /**
958 * Return with no error */
959 M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR");
960 return M4NO_ERROR;
961 }
962
963 /******************************************************************************
964 * M4OSA_ERR M4VSS3GPP_intStartAU()
965 * @brief StartAU writer-like interface used for the VSS 3GPP only
966 * @note
967 * @param pContext: (IN) It is the VSS 3GPP context in our case
968 * @param streamID: (IN) Id of the stream to which the Access Unit is related.
969 * @param pAU: (IN/OUT) Access Unit to be prepared.
970 * @return M4NO_ERROR: there is no error
971 ******************************************************************************
972 */
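/*
 * M4VSS3GPP_intStartAU and M4VSS3GPP_intProcessAU below mimic the writer's
 * pStartAU/pProcessAU entry points so that the encoder shell can hand encoded AUs
 * back through the VSS 3GPP context; presumably they are plugged into a
 * writer-style data interface when the encoder is set up.
 */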
973 M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext,
974 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
975 {
976 M4OSA_ERR err;
977 M4OSA_UInt32 uiMaxAuSize;
978
979 /**
980 * Given context is actually the VSS3GPP context */
981 M4VSS3GPP_InternalEditContext *pC =
982 (M4VSS3GPP_InternalEditContext *)pContext;
983
984 /**
985 * Get the output AU to write into */
986 err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext,
987 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
988
989 if( M4NO_ERROR != err )
990 {
991 M4OSA_TRACE1_1(
992 "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!",
993 err);
994 return err;
995 }
996
997 /**
998 * Return */
999 M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR");
1000 return M4NO_ERROR;
1001 }
1002
1003 /******************************************************************************
1004 * M4OSA_ERR M4VSS3GPP_intProcessAU()
1005 * @brief ProcessAU writer-like interface used for the VSS 3GPP only
1006 * @note
1007 * @param pContext: (IN) It is the VSS 3GPP context in our case
1008 * @param streamID: (IN) Id of the stream to which the Access Unit is related.
1009 * @param pAU: (IN/OUT) Access Unit to be written
1010 * @return M4NO_ERROR: there is no error
1011 ******************************************************************************
1012 */
1013 M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
1014 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
1015 {
1016 M4OSA_ERR err;
1017
1018 /**
1019 * Given context is actually the VSS3GPP context */
1020 M4VSS3GPP_InternalEditContext *pC =
1021 (M4VSS3GPP_InternalEditContext *)pContext;
1022
1023 /**
1024 * Fix the encoded AU time */
1025 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1026 pC->ewc.dOutputVidCts = pAU->CTS;
1027 /**
1028 * Update time info for the Counter Time System to be equal to the bit-stream time */
1029 M4VSS3GPP_intUpdateTimeInfo(pC, pAU);
1030
1031 /**
1032 * Write the AU */
1033 err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
1034 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
1035
1036 if( M4NO_ERROR != err )
1037 {
1038 M4OSA_TRACE1_1(
1039 "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
1040 err);
1041 return err;
1042 }
1043
1044 /**
1045 * Return */
1046 M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
1047 return M4NO_ERROR;
1048 }
1049
1050 /**
1051 ******************************************************************************
1052 * M4OSA_ERR M4VSS3GPP_intVPP()
1053 * @brief We implement our own VideoPreProcessing function
1054 * @note It is called by the video encoder
1055 * @param pContext (IN) VPP context, which actually is the VSS 3GPP context in our case
1056 * @param pPlaneIn (IN)
1057 * @param pPlaneOut (IN/OUT) Pointer to an array of 3 planes that will contain the output
1058 * YUV420 image
1059 * @return M4NO_ERROR: No error
1060 ******************************************************************************
1061 */
1062 M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
1063 M4VIFI_ImagePlane *pPlaneOut )
1064 {
1065 M4OSA_ERR err = M4NO_ERROR;
1066 M4_MediaTime ts;
1067 M4VIFI_ImagePlane *pTmp = M4OSA_NULL;
1068 M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ;
1069 M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
1070 M4VIFI_ImagePlane pTemp1[3],pTemp2[3];
1071 M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3];
1072 M4OSA_UInt32 i = 0, yuvFrameWidth = 0, yuvFrameHeight = 0;
1073 M4OSA_Bool bSkipFrameEffect = M4OSA_FALSE;
1074 /**
1075 * VPP context is actually the VSS3GPP context */
1076 M4VSS3GPP_InternalEditContext *pC =
1077 (M4VSS3GPP_InternalEditContext *)pContext;
1078
1079 memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane));
1080 memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane));
1081 memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane));
1082 memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane));
1083
1084 /**
1085 * Reset VPP error remembered in context */
1086 pC->ewc.VppError = M4NO_ERROR;
1087
1088 /**
1089 * At the end of the editing, we may be called when no more clip is loaded.
1090 * (because to close the encoder properly it must be stepped once or twice...) */
1091 if( M4OSA_NULL == pC->pC1 )
1092 {
1093 /**
1094 * We must fill the input of the encoder with a dummy image, because
1095 * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */
1096 memset((void *)pPlaneOut[0].pac_data,0,
1097 pPlaneOut[0].u_stride * pPlaneOut[0].u_height);
1098 memset((void *)pPlaneOut[1].pac_data,0,
1099 pPlaneOut[1].u_stride * pPlaneOut[1].u_height);
1100 memset((void *)pPlaneOut[2].pac_data,0,
1101 pPlaneOut[2].u_stride * pPlaneOut[2].u_height);
1102
1103 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)");
1104 return M4NO_ERROR;
1105 }
1106
1107 /**
1108 **************** Transition case ****************/
1109 if( M4OSA_TRUE == pC->bTransitionEffect )
1110 {
1111
1112 err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth,
1113 pC->ewc.uiVideoHeight);
1114 if (M4NO_ERROR != err)
1115 {
1116 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \
1117 returning M4NO_ERROR", err);
1118 pC->ewc.VppError = err;
1119 return M4NO_ERROR; /**< Return no error to the encoder core
1120 (else it may leak in some situations...) */
1121 }
1122
1123 err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth,
1124 pC->ewc.uiVideoHeight);
1125 if (M4NO_ERROR != err)
1126 {
1127 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \
1128 returning M4NO_ERROR", err);
1129 pC->ewc.VppError = err;
1130 return M4NO_ERROR; /**< Return no error to the encoder core
1131 (else it may leak in some situations...) */
1132 }
1133
1134 err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1135 pC->ewc.uiVideoHeight);
1136 if( M4NO_ERROR != err )
1137 {
1138 M4OSA_TRACE1_1(
1139 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1140 returning M4NO_ERROR",
1141 err);
1142 pC->ewc.VppError = err;
1143 return
1144 M4NO_ERROR; /**< Return no error to the encoder core
1145 (else it may leak in some situations...) */
1146 }
1147
1148 err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth,
1149 pC->ewc.uiVideoHeight);
1150 if( M4NO_ERROR != err )
1151 {
1152 M4OSA_TRACE1_1(
1153 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1154 returning M4NO_ERROR",
1155 err);
1156 pC->ewc.VppError = err;
1157 return
1158 M4NO_ERROR; /**< Return no error to the encoder core
1159 (else it may leak in some situations...) */
1160 }
1161
1162 err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth,
1163 pC->ewc.uiVideoHeight);
1164 if( M4NO_ERROR != err )
1165 {
1166 M4OSA_TRACE1_1(
1167 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1168 returning M4NO_ERROR",
1169 err);
1170 pC->ewc.VppError = err;
1171 return
1172 M4NO_ERROR; /**< Return no error to the encoder core
1173 (else it may leak in some situations...) */
1174 }
1175
1176 /**
1177 * Compute the time in the clip1 base: ts = to - Offset */
1178 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1179 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1180
1181 /**
1182 * Render Clip1 */
1183 if( pC->pC1->isRenderDup == M4OSA_FALSE )
1184 {
1185 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE,
1186 pTempPlaneClip1, pTemp1,
1187 pPlaneOut);
1188 if ((M4NO_ERROR != err) &&
1189 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
1190 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1191 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
1192 pC->ewc.VppError = err;
1193 /** Return no error to the encoder core
1194 * else it may leak in some situations.*/
1195 return M4NO_ERROR;
1196 }
1197 }
1198 if ((pC->pC1->isRenderDup == M4OSA_TRUE) ||
1199 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
1200 pTmp = pC->yuv1;
1201 if (pC->pC1->lastDecodedPlane != M4OSA_NULL) {
1202 /* Copy last decoded plane to output plane */
1203 memcpy((void *)pTmp[0].pac_data,
1204 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1205 (pTmp[0].u_height * pTmp[0].u_width));
1206 memcpy((void *)pTmp[1].pac_data,
1207 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1208 (pTmp[1].u_height * pTmp[1].u_width));
1209 memcpy((void *)pTmp[2].pac_data,
1210 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1211 (pTmp[2].u_height * pTmp[2].u_width));
1212 } else {
1213 err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
1214 M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
1215 err, __FILE__, __LINE__);
1216 pC->ewc.VppError = err;
1217 return M4NO_ERROR;
1218 }
1219 pC->pC1->lastDecodedPlane = pTmp;
1220 }
1221
1222 /**
1223 * Compute the time in the clip2 base: ts = to - Offset */
1224 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1225 ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset;
1226 /**
1227 * Render Clip2 */
1228 if( pC->pC2->isRenderDup == M4OSA_FALSE )
1229 {
1230
1231 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, ts, M4OSA_FALSE,
1232 pTempPlaneClip2, pTemp2,
1233 pPlaneOut);
1234 if ((M4NO_ERROR != err) &&
1235 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
1236 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1237 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
1238 pC->ewc.VppError = err;
1239 /** Return no error to the encoder core
1240 * else it may leak in some situations.*/
1241 return M4NO_ERROR;
1242 }
1243 }
1244 if ((pC->pC2->isRenderDup == M4OSA_TRUE) ||
1245 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
1246 pTmp = pC->yuv2;
1247 if (pC->pC2->lastDecodedPlane != M4OSA_NULL) {
1248 /* Copy last decoded plane to output plane */
1249 memcpy((void *)pTmp[0].pac_data,
1250 (void *)pC->pC2->lastDecodedPlane[0].pac_data,
1251 (pTmp[0].u_height * pTmp[0].u_width));
1252 memcpy((void *)pTmp[1].pac_data,
1253 (void *)pC->pC2->lastDecodedPlane[1].pac_data,
1254 (pTmp[1].u_height * pTmp[1].u_width));
1255 memcpy((void *)pTmp[2].pac_data,
1256 (void *)pC->pC2->lastDecodedPlane[2].pac_data,
1257 (pTmp[2].u_height * pTmp[2].u_width));
1258 } else {
1259 err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
1260 M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
1261 err, __FILE__, __LINE__);
1262 pC->ewc.VppError = err;
1263 return M4NO_ERROR;
1264 }
1265 pC->pC2->lastDecodedPlane = pTmp;
1266 }
1267
1268
1269 pTmp = pPlaneOut;
1270 err = M4VSS3GPP_intVideoTransition(pC, pTmp);
1271
1272 if( M4NO_ERROR != err )
1273 {
1274 M4OSA_TRACE1_1(
1275 "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\
1276 returning M4NO_ERROR",
1277 err);
1278 pC->ewc.VppError = err;
1279 return M4NO_ERROR; /**< Return no error to the encoder core
1280 (else it may leak in some situations...) */
1281 }
1282 for (i=0; i < 3; i++)
1283 {
1284 if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) {
1285 free(pTempPlaneClip2[i].pac_data);
1286 pTempPlaneClip2[i].pac_data = M4OSA_NULL;
1287 }
1288
1289 if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) {
1290 free(pTempPlaneClip1[i].pac_data);
1291 pTempPlaneClip1[i].pac_data = M4OSA_NULL;
1292 }
1293
1294 if (pTemp2[i].pac_data != M4OSA_NULL) {
1295 free(pTemp2[i].pac_data);
1296 pTemp2[i].pac_data = M4OSA_NULL;
1297 }
1298
1299 if (pTemp1[i].pac_data != M4OSA_NULL) {
1300 free(pTemp1[i].pac_data);
1301 pTemp1[i].pac_data = M4OSA_NULL;
1302 }
1303 }
1304 }
1305 /**
1306 **************** No Transition case ****************/
1307 else
1308 {
1309 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case");
1310 /**
1311 * Compute the time in the clip base: ts = to - Offset */
1312 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1313 pC->bIssecondClip = M4OSA_FALSE;
1314 /**
1315 * Render */
1316 if (pC->pC1->isRenderDup == M4OSA_FALSE) {
1317 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false");
1318 /**
1319 * Check if resizing is needed */
1320 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
1321 if ((pC->pC1->pSettings->FileType ==
1322 M4VIDEOEDITING_kFileType_ARGB8888) &&
1323 (pC->nbActiveEffects == 0) &&
1324 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) {
1325 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1326 pC->pC1->pViDecCtxt,
1327 M4DECODER_kOptionID_EnableYuvWithEffect,
1328 (M4OSA_DataOption)M4OSA_TRUE);
1329 if (M4NO_ERROR == err ) {
1330 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1331 pC->pC1->pViDecCtxt, &ts,
1332 pPlaneOut, M4OSA_TRUE);
1333 }
1334 } else {
1335 if (pC->pC1->pSettings->FileType ==
1336 M4VIDEOEDITING_kFileType_ARGB8888) {
1337 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1338 pC->pC1->pViDecCtxt,
1339 M4DECODER_kOptionID_EnableYuvWithEffect,
1340 (M4OSA_DataOption)M4OSA_FALSE);
1341 }
1342 if (M4NO_ERROR == err) {
1343 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1344 pC->pC1->pViDecCtxt, &ts,
1345 pC->pC1->m_pPreResizeFrame, M4OSA_TRUE);
1346 }
1347 }
1348 if (M4NO_ERROR != err) {
1349 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1350 m_pFctRender() returns error 0x%x", err);
1351 pC->ewc.VppError = err;
1352 return M4NO_ERROR;
1353 }
1354 if (pC->pC1->pSettings->FileType !=
1355 M4VIDEOEDITING_kFileType_ARGB8888) {
1356 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
1357 // Save width and height of un-rotated frame
1358 yuvFrameWidth = pC->pC1->m_pPreResizeFrame[0].u_width;
1359 yuvFrameHeight = pC->pC1->m_pPreResizeFrame[0].u_height;
1360 err = M4VSS3GPP_intRotateVideo(pC->pC1->m_pPreResizeFrame,
1361 pC->pC1->pSettings->ClipProperties.videoRotationDegrees);
1362 if (M4NO_ERROR != err) {
1363 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1364 rotateVideo() returns error 0x%x", err);
1365 pC->ewc.VppError = err;
1366 return M4NO_ERROR;
1367 }
1368 }
1369 }
1370
1371 if (pC->nbActiveEffects > 0) {
1372 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE;
1373 /**
1374 * If we do modify the image, we need an intermediate
1375 * image plane */
1376 err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1377 pC->pC1->m_pPreResizeFrame[0].u_width,
1378 pC->pC1->m_pPreResizeFrame[0].u_height);
1379 if (M4NO_ERROR != err) {
1380 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1381 M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
1382 pC->ewc.VppError = err;
1383 return M4NO_ERROR;
1384 }
1385 /* If video frame need to be resized, then apply the overlay after
1386 * the frame was rendered with rendering mode.
1387 * Here skip the framing(overlay) effect when applying video Effect. */
1388 bSkipFrameEffect = M4OSA_TRUE;
1389 err = M4VSS3GPP_intApplyVideoEffect(pC,
1390 pC->pC1->m_pPreResizeFrame, pTemp1, bSkipFrameEffect);
1391 if (M4NO_ERROR != err) {
1392 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1393 M4VSS3GPP_intApplyVideoEffect() error 0x%x", err);
1394 pC->ewc.VppError = err;
1395 return M4NO_ERROR;
1396 }
1397 pDecoderRenderFrame= pTemp1;
1398
1399 } else {
1400 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
1401 }
1402 /* Prepare overlay temporary buffer if an overlay exists */
1403 if (pC->bClip1ActiveFramingEffect) {
1404 err = M4VSS3GPP_intAllocateYUV420(pTemp2,
1405 pPlaneOut[0].u_width, pPlaneOut[0].u_height);
1406 if (M4NO_ERROR != err) {
1407 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \
1408 returns 0x%x, returning M4NO_ERROR", err);
1409 pC->ewc.VppError = err;
1410 return M4NO_ERROR;
1411 }
1412 pTmp = pTemp2;
1413 } else {
1414 pTmp = pPlaneOut;
1415 }
1416
1417 /* Do rendering mode. */
1418 if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
1419 (pC->pC1->pSettings->FileType !=
1420 M4VIDEOEDITING_kFileType_ARGB8888)) {
1421
1422 err = M4VSS3GPP_intApplyRenderingMode(pC,
1423 pC->pC1->pSettings->xVSS.MediaRendering,
1424 pDecoderRenderFrame, pTmp);
1425 if (M4NO_ERROR != err) {
1426 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1427 M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err);
1428 pC->ewc.VppError = err;
1429 return M4NO_ERROR;
1430 }
1431 }
1432
1433 /* Apply overlay if one exists */
1434 if (pC->bClip1ActiveFramingEffect) {
1435 pDecoderRenderFrame = pTmp;
1436 pTmp = pPlaneOut;
1437 err = M4VSS3GPP_intApplyVideoOverlay(pC,
1438 pDecoderRenderFrame, pTmp);
1439 if (M4NO_ERROR != err) {
1440 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1441 M4VSS3GPP_intApplyVideoOverlay) error 0x%x ", err);
1442 pC->ewc.VppError = err;
1443 return M4NO_ERROR;
1444 }
1445 }
1446
1447 if ((pC->pC1->pSettings->FileType ==
1448 M4VIDEOEDITING_kFileType_ARGB8888) &&
1449 (pC->nbActiveEffects == 0) &&
1450 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
1451
1452 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1453 pC->pC1->pViDecCtxt,
1454 M4DECODER_kOptionID_YuvWithEffectNonContiguous,
1455 (M4OSA_DataOption)pTmp);
1456 if (M4NO_ERROR != err) {
1457 pC->ewc.VppError = err;
1458 return M4NO_ERROR;
1459 }
1460 pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE;
1461 }
1462
1463 // Reset original width and height for resize frame plane
1464 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees &&
1465 180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
1466
1467 M4VSS3GPP_intSetYUV420Plane(pC->pC1->m_pPreResizeFrame,
1468 yuvFrameWidth, yuvFrameHeight);
1469 }
1470 }
1471 else
1472 {
1473 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required");
1474 if (pC->nbActiveEffects > 0) {
1475 /** If we do modify the image, we need an
1476 * intermediate image plane */
1477 err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1478 pC->ewc.uiVideoWidth,
1479 pC->ewc.uiVideoHeight);
1480 if (M4NO_ERROR != err) {
1481 pC->ewc.VppError = err;
1482 return M4NO_ERROR;
1483 }
1484 pDecoderRenderFrame = pTemp1;
1485 }
1486 else {
1487 pDecoderRenderFrame = pPlaneOut;
1488 }
1489
1490 pTmp = pPlaneOut;
1491 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1492 pC->pC1->pViDecCtxt, &ts,
1493 pDecoderRenderFrame, M4OSA_TRUE);
1494 if (M4NO_ERROR != err) {
1495 pC->ewc.VppError = err;
1496 return M4NO_ERROR;
1497 }
1498
1499 if (pC->nbActiveEffects > 0) {
1500 /* Here we do not skip the overlay effect since
1501 * overlay and video frame are both of the same resolution */
1502 bSkipFrameEffect = M4OSA_FALSE;
1503 err = M4VSS3GPP_intApplyVideoEffect(pC,
1504 pDecoderRenderFrame,pPlaneOut,bSkipFrameEffect);
1505 }
1506 if (M4NO_ERROR != err) {
1507 pC->ewc.VppError = err;
1508 return M4NO_ERROR;
1509 }
1510 }
1511 pC->pC1->lastDecodedPlane = pTmp;
1512 pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts;
1513
1514 } else {
1515 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true");
1516
1517 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
1518 /**
1519 * Copy last decoded plane to output plane */
1520 if (pC->pC1->lastDecodedPlane != M4OSA_NULL) {
1521
1522 memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data,
1523 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1524 (pC->pC1->m_pPreResizeFrame[0].u_height * \
1525 pC->pC1->m_pPreResizeFrame[0].u_width));
1526
1527 memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data,
1528 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1529 (pC->pC1->m_pPreResizeFrame[1].u_height * \
1530 pC->pC1->m_pPreResizeFrame[1].u_width));
1531
1532 memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data,
1533 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1534 (pC->pC1->m_pPreResizeFrame[2].u_height * \
1535 pC->pC1->m_pPreResizeFrame[2].u_width));
1536 } else {
1537 err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
1538 M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
1539 err, __FILE__, __LINE__);
1540 pC->ewc.VppError = err;
1541 return M4NO_ERROR;
1542 }
1543
1544 if(pC->nbActiveEffects > 0) {
1545 /**
1546 * If we do modify the image, we need an
1547 * intermediate image plane */
1548 err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1549 pC->pC1->m_pPreResizeFrame[0].u_width,
1550 pC->pC1->m_pPreResizeFrame[0].u_height);
1551 if (M4NO_ERROR != err) {
1552 pC->ewc.VppError = err;
1553 return M4NO_ERROR;
1554 }
1555 /* If video frame need to be resized, then apply the overlay after
1556 * the frame was rendered with rendering mode.
1557 * Here skip the framing(overlay) effect when applying video Effect. */
1558 bSkipFrameEffect = M4OSA_TRUE;
1559 err = M4VSS3GPP_intApplyVideoEffect(pC,
1560 pC->pC1->m_pPreResizeFrame,pTemp1, bSkipFrameEffect);
1561 if (M4NO_ERROR != err) {
1562 pC->ewc.VppError = err;
1563 return M4NO_ERROR;
1564 }
1565 pDecoderRenderFrame= pTemp1;
1566 } else {
1567 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
1568 }
1569 /* Prepare overlay temporary buffer if an overlay exists */
1570 if (pC->bClip1ActiveFramingEffect) {
1571 err = M4VSS3GPP_intAllocateYUV420(
1572 pTemp2, pC->ewc.uiVideoWidth, pC->ewc.uiVideoHeight);
1573 if (M4NO_ERROR != err) {
1574 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \
1575 returns 0x%x, returning M4NO_ERROR", err);
1576 pC->ewc.VppError = err;
1577 return M4NO_ERROR;
1578 }
1579 pTmp = pTemp2;
1580 } else {
1581 pTmp = pPlaneOut;
1582 }
1583 /* Do rendering mode */
1584 err = M4VSS3GPP_intApplyRenderingMode(pC,
1585 pC->pC1->pSettings->xVSS.MediaRendering,
1586 pDecoderRenderFrame, pTmp);
1587 if (M4NO_ERROR != err) {
1588 pC->ewc.VppError = err;
1589 return M4NO_ERROR;
1590 }
1591 /* Apply overlay if one exists */
1592 pTmp = pPlaneOut;
1593 if (pC->bClip1ActiveFramingEffect) {
1594 err = M4VSS3GPP_intApplyVideoOverlay(pC,
1595 pTemp2, pTmp);
1596 if (M4NO_ERROR != err) {
1597 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1598 M4VSS3GPP_intApplyVideoOverlay) error 0x%x ", err);
1599 pC->ewc.VppError = err;
1600 return M4NO_ERROR;
1601 }
1602 }
1603 } else {
1604
1605 err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1606 pC->ewc.uiVideoWidth,
1607 pC->ewc.uiVideoHeight);
1608 if (M4NO_ERROR != err) {
1609 pC->ewc.VppError = err;
1610 return M4NO_ERROR;
1611 }
1612 /**
1613 * Copy last decoded plane to output plane */
1614 if (pC->pC1->lastDecodedPlane != M4OSA_NULL &&
1615 pLastDecodedFrame != M4OSA_NULL) {
1616 memcpy((void *)pLastDecodedFrame[0].pac_data,
1617 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1618 (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width));
1619
1620 memcpy((void *)pLastDecodedFrame[1].pac_data,
1621 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1622 (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width));
1623
1624 memcpy((void *)pLastDecodedFrame[2].pac_data,
1625 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1626 (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width));
1627 } else {
1628 err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
1629 M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
1630 err, __FILE__, __LINE__);
1631 pC->ewc.VppError = err;
1632 return M4NO_ERROR;
1633 }
1634
1635 pTmp = pPlaneOut;
1636 /**
1637 * Check if there is an effect */
1638 if(pC->nbActiveEffects > 0) {
1639 /* Here we do not skip the overlay effect since
1640 * overlay and video are both of the same resolution */
1641 bSkipFrameEffect = M4OSA_FALSE;
1642 err = M4VSS3GPP_intApplyVideoEffect(pC,
1643 pLastDecodedFrame, pTmp,bSkipFrameEffect);
1644 if (M4NO_ERROR != err) {
1645 pC->ewc.VppError = err;
1646 return M4NO_ERROR;
1647 }
1648 }
1649 }
1650 pC->pC1->lastDecodedPlane = pTmp;
1651 }
1652
1653 M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts);
1654
1655 for (i=0; i<3; i++) {
1656 if (pTemp1[i].pac_data != M4OSA_NULL) {
1657 free(pTemp1[i].pac_data);
1658 pTemp1[i].pac_data = M4OSA_NULL;
1659 }
1660 }
1661 for (i=0; i<3; i++) {
1662 if (pTemp2[i].pac_data != M4OSA_NULL) {
1663 free(pTemp2[i].pac_data);
1664 pTemp2[i].pac_data = M4OSA_NULL;
1665 }
1666 }
1667 }
1668
1669 /**
1670 * Return */
1671 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
1672 return M4NO_ERROR;
1673 }
1674 /**
1675 ******************************************************************************
1676 * M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay()
1677 * @brief Apply video overlay from pPlaneIn to pPlaneOut
1678 * @param pC (IN/OUT) Internal edit context
1679 * @param pInputPlanes (IN) Input raw YUV420 image
1680 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image
1681 * @return M4NO_ERROR: No error
1682 ******************************************************************************
1683 */
1684 static M4OSA_ERR
1685 M4VSS3GPP_intApplyVideoOverlay (M4VSS3GPP_InternalEditContext *pC,
1686 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
1687
1688 M4VSS3GPP_ClipContext *pClip;
1689 M4VSS3GPP_EffectSettings *pFx;
1690 M4VSS3GPP_ExternalProgress extProgress;
1691 M4OSA_Double VideoEffectTime;
1692 M4OSA_Double PercentageDone;
1693 M4OSA_UInt8 NumActiveEffects =0;
1694 M4OSA_UInt32 Cts = 0;
1695 M4OSA_Int32 nextEffectTime;
1696 M4OSA_Int32 tmp;
1697 M4OSA_UInt8 i;
1698 M4OSA_ERR err;
1699
1700 pClip = pC->pC1;
1701 if (pC->bIssecondClip == M4OSA_TRUE) {
1702 NumActiveEffects = pC->nbActiveEffects1;
1703 } else {
1704 NumActiveEffects = pC->nbActiveEffects;
1705 }
1706 for (i=0; i<NumActiveEffects; i++) {
1707 if (pC->bIssecondClip == M4OSA_TRUE) {
1708 pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1709 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1710 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1711 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1712 pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration - pFx->uiStartTime;
1713 } else {
1714 pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1715 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1716 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1717 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1718 }
1719 /* Do the framing(overlay) effect only,
1720 * skip other color effect which had been applied */
1721 if (pFx->xVSS.pFramingBuffer == M4OSA_NULL) {
1722 continue;
1723 }
1724
1725 /* To calculate the percentage, subtract timeIncrement because the effect should finish
1726 * on the last frame, which is presented from CTS = eof-timeIncrement until CTS = eof */
1727 PercentageDone = VideoEffectTime / ((M4OSA_Float)pFx->uiDuration);
1728
1729 if (PercentageDone < 0.0) {
1730 PercentageDone = 0.0;
1731 }
1732 if (PercentageDone > 1.0) {
1733 PercentageDone = 1.0;
1734 }
1735 /**
1736 * Compute where we are in the effect (scale is 0->1000) */
1737 tmp = (M4OSA_Int32)(PercentageDone * 1000);
1738
1739 /**
1740 * Set the progress info provided to the external function */
1741 extProgress.uiProgress = (M4OSA_UInt32)tmp;
1742 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1743 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1744 extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1745 extProgress.bIsLast = M4OSA_FALSE;
1746 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1747 nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1748 + pC->dOutputFrameDuration);
1749 if (nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) {
1750 extProgress.bIsLast = M4OSA_TRUE;
1751 }
1752 err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1753 pPlaneIn, pPlaneOut, &extProgress,
1754 pFx->VideoEffectType - M4VSS3GPP_kVideoEffectType_External);
1755
1756 if (M4NO_ERROR != err) {
1757 M4OSA_TRACE1_1(
1758 "M4VSS3GPP_intApplyVideoOverlay: \
1759 External video effect function returns 0x%x!",
1760 err);
1761 return err;
1762 }
1763 }
1764
1765 /**
1766 * Return */
1767 M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoOverlay: returning M4NO_ERROR");
1768 return M4NO_ERROR;
1769 }
1770 /**
1771 ******************************************************************************
1772 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
1773 * @brief Apply video effect from pPlaneIn to pPlaneOut
1774 * @param pC (IN/OUT) Internal edit context
1775 * @param pPlaneIn (IN) Input raw YUV420 image
1776 * @param pPlaneOut (IN/OUT) Output raw YUV420 image
1778 * @param bSkipFramingEffect (IN) skip framing effect flag
1779 * @return M4NO_ERROR: No error
1780 ******************************************************************************
1781 */
1782 static M4OSA_ERR
1783 M4VSS3GPP_intApplyVideoEffect (M4VSS3GPP_InternalEditContext *pC,
1784 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut,
1785 M4OSA_Bool bSkipFramingEffect) {
1786
1787 M4OSA_ERR err;
1788
1789 M4VSS3GPP_ClipContext *pClip;
1790 M4VSS3GPP_EffectSettings *pFx;
1791 M4VSS3GPP_ExternalProgress extProgress;
1792
1793 M4OSA_Double VideoEffectTime;
1794 M4OSA_Double PercentageDone;
1795 M4OSA_Int32 tmp;
1796
1797 M4VIFI_ImagePlane *pPlaneTempIn;
1798 M4VIFI_ImagePlane *pPlaneTempOut;
1799 M4VIFI_ImagePlane pTempYuvPlane[3];
1800 M4OSA_UInt8 i;
1801 M4OSA_UInt8 NumActiveEffects =0;
1802
1803
1804 pClip = pC->pC1;
1805 if (pC->bIssecondClip == M4OSA_TRUE)
1806 {
1807 NumActiveEffects = pC->nbActiveEffects1;
1808 }
1809 else
1810 {
1811 NumActiveEffects = pC->nbActiveEffects;
1812 }
1813
1814 memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane));
1815
1816 /**
1817 * Allocate temporary plane if needed RC */
1818 if (NumActiveEffects > 1) {
1819 err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width,
1820 pPlaneOut->u_height);
1821
1822 if( M4NO_ERROR != err )
1823 {
1824 M4OSA_TRACE1_1(
1825 "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1826 returning M4NO_ERROR",
1827 err);
1828 pC->ewc.VppError = err;
1829 return
1830 M4NO_ERROR; /**< Return no error to the encoder core
1831 (else it may leak in some situations...) */
1832 }
1833 }
1834
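    /* Ping-pong buffering between the temporary plane and the output plane:
     * with an even number of chained effects the first pass writes into the
     * temporary plane, with an odd number it writes straight into pPlaneOut,
     * so that the last effect always ends up in pPlaneOut. */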
1835 if (NumActiveEffects % 2 == 0)
1836 {
1837 pPlaneTempIn = pPlaneIn;
1838 pPlaneTempOut = pTempYuvPlane;
1839 }
1840 else
1841 {
1842 pPlaneTempIn = pPlaneIn;
1843 pPlaneTempOut = pPlaneOut;
1844 }
1845
1846 for (i=0; i<NumActiveEffects; i++)
1847 {
1848 if (pC->bIssecondClip == M4OSA_TRUE)
1849 {
1850
1851
1852 pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1853 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1854 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1855 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1856 pC->pTransitionList[pC->uiCurrentClip].
1857 uiTransitionDuration- pFx->uiStartTime;
1858 }
1859 else
1860 {
1861 pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1862 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1863 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1864 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1865 }
1866
1867
1868
1869 /* To calculate the percentage, subtract timeIncrement because the effect should finish on the last frame,*/
1870 /* which is presented from CTS = eof-timeIncrement until CTS = eof */
1871 PercentageDone = VideoEffectTime
1872 / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);
1873
1874 if( PercentageDone < 0.0 )
1875 PercentageDone = 0.0;
1876
1877 if( PercentageDone > 1.0 )
1878 PercentageDone = 1.0;
1879
1880 switch( pFx->VideoEffectType )
1881 {
1882 case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1883 /**
1884 * Compute where we are in the effect (scale is 0->1024). */
1885 tmp = (M4OSA_Int32)(PercentageDone * 1024);
1886
1887 /**
1888 * Apply the darkening effect */
1889 err =
1890 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1891 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1892
1893 if( M4NO_ERROR != err )
1894 {
1895 M4OSA_TRACE1_1(
1896 "M4VSS3GPP_intApplyVideoEffect:\
1897 M4VFL_modifyLumaWithScale returns error 0x%x,\
1898 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1899 err);
1900 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1901 }
1902 break;
1903
1904 case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1905 /**
1906 * Compute where we are in the effect (scale is 0->1024) */
1907 tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);
1908
1909 /**
1910 * Apply the darkening effect */
1911 err =
1912 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1913 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1914
1915 if( M4NO_ERROR != err )
1916 {
1917 M4OSA_TRACE1_1(
1918 "M4VSS3GPP_intApplyVideoEffect:\
1919 M4VFL_modifyLumaWithScale returns error 0x%x,\
1920 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1921 err);
1922 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1923 }
1924 break;
1925
1926 default:
1927 if( pFx->VideoEffectType
1928 >= M4VSS3GPP_kVideoEffectType_External )
1929 {
1930 M4OSA_UInt32 Cts = 0;
1931 M4OSA_Int32 nextEffectTime;
1932
1933 /**
1934 * Compute where we are in the effect (scale is 0->1000) */
1935 tmp = (M4OSA_Int32)(PercentageDone * 1000);
1936
1937 /**
1938 * Set the progress info provided to the external function */
1939 extProgress.uiProgress = (M4OSA_UInt32)tmp;
1940 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1941 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1942 extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1943 extProgress.bIsLast = M4OSA_FALSE;
1944 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1945 nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1946 + pC->dOutputFrameDuration);
1947 if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
1948 {
1949 extProgress.bIsLast = M4OSA_TRUE;
1950 }
1951 /* Here skip the framing effect,
1952 * do the framing effect after apply rendering mode */
1953 if ((pFx->xVSS.pFramingBuffer != M4OSA_NULL) &&
1954 bSkipFramingEffect == M4OSA_TRUE) {
1955 memcpy(pPlaneTempOut[0].pac_data, pPlaneTempIn[0].pac_data,
1956 pPlaneTempIn[0].u_height * pPlaneTempIn[0].u_width);
1957 memcpy(pPlaneTempOut[1].pac_data, pPlaneTempIn[1].pac_data,
1958 pPlaneTempIn[1].u_height * pPlaneTempIn[1].u_width);
1959 memcpy(pPlaneTempOut[2].pac_data, pPlaneTempIn[2].pac_data,
1960 pPlaneTempIn[2].u_height * pPlaneTempIn[2].u_width);
1961
1962 } else {
1963 err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1964 pPlaneTempIn, pPlaneTempOut, &extProgress,
1965 pFx->VideoEffectType
1966 - M4VSS3GPP_kVideoEffectType_External);
1967 }
1968 if( M4NO_ERROR != err )
1969 {
1970 M4OSA_TRACE1_1(
1971 "M4VSS3GPP_intApplyVideoEffect: \
1972 External video effect function returns 0x%x!",
1973 err);
1974 return err;
1975 }
1976 break;
1977 }
1978 else
1979 {
1980 M4OSA_TRACE1_1(
1981 "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
1982 returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
1983 pFx->VideoEffectType);
1984 return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
1985 }
1986 }
1987 /**
1988 * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */
1989 if (((i % 2 == 0) && (NumActiveEffects % 2 == 0))
1990 || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
1991 {
1992 pPlaneTempIn = pTempYuvPlane;
1993 pPlaneTempOut = pPlaneOut;
1994 }
1995 else
1996 {
1997 pPlaneTempIn = pPlaneOut;
1998 pPlaneTempOut = pTempYuvPlane;
1999 }
2000 }
2001
2002 for(i=0; i<3; i++) {
2003 if(pTempYuvPlane[i].pac_data != M4OSA_NULL) {
2004 free(pTempYuvPlane[i].pac_data);
2005 pTempYuvPlane[i].pac_data = M4OSA_NULL;
2006 }
2007 }
2008
2009 /**
2010 * Return */
2011 M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
2012 return M4NO_ERROR;
2013 }
2014
2015 /**
2016 ******************************************************************************
2017 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
2018 * @brief Apply video transition effect pC1+pC2->pPlaneOut
2019 * @param pC (IN/OUT) Internal edit context
2020 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image
2021 * @return M4NO_ERROR: No error
2022 ******************************************************************************
2023 */
2024 static M4OSA_ERR
2025 M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
2026 M4VIFI_ImagePlane *pPlaneOut )
2027 {
2028 M4OSA_ERR err;
2029 M4OSA_Int32 iProgress;
2030 M4VSS3GPP_ExternalProgress extProgress;
2031 M4VIFI_ImagePlane *pPlane;
2032 M4OSA_Int32 i;
2033 const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
2034 pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
2035
2036 /**
2037 * Compute how far from the end cut we are, in clip-base time.
2038 * It is done with integers because the offset and begin cut have been rounded already. */
2039 // Decorrelate input and output encoding timestamp to handle encoder prefetch
2040 iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
2041 ((M4OSA_Double)pC->pC1->iVoffset);
2042 /**
2043 * We must remove the duration of one frame, else we would almost never reach the end
2044 * (it is essentially a fence-post issue). */
2045 iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;
2046
2047 if( iProgress < 0 ) /**< Sanity checks */
2048 {
2049 iProgress = 0;
2050 }
2051
2052 /**
2053 * Compute where we are in the transition, on a base 1000 */
2054 iProgress = ( ( iDur - iProgress) * 1000) / iDur;
2055
2056 /**
2057 * Sanity checks */
2058 if( iProgress < 0 )
2059 {
2060 iProgress = 0;
2061 }
2062 else if( iProgress > 1000 )
2063 {
2064 iProgress = 1000;
2065 }
2066
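    /* The transition behaviour remaps the linear progress (0..1000) onto an
     * easing curve over the same range: e.g. SpeedUp uses p*p/1000 (slow
     * start, fast end) and SpeedDown uses sqrt(p*1000) (fast start, slow end);
     * SlowMiddle and FastMiddle combine both halves around p = 500. */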
2067 switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
2068 {
2069 case M4VSS3GPP_TransitionBehaviour_SpeedUp:
2070 iProgress = ( iProgress * iProgress) / 1000;
2071 break;
2072
2073 case M4VSS3GPP_TransitionBehaviour_Linear:
2074 /*do nothing*/
2075 break;
2076
2077 case M4VSS3GPP_TransitionBehaviour_SpeedDown:
2078 iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
2079 break;
2080
2081 case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
2082 if( iProgress < 500 )
2083 {
2084 iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
2085 }
2086 else
2087 {
2088 iProgress =
2089 (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
2090 / 500) + 500);
2091 }
2092 break;
2093
2094 case M4VSS3GPP_TransitionBehaviour_FastMiddle:
2095 if( iProgress < 500 )
2096 {
2097 iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
2098 }
2099 else
2100 {
2101 iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
2102 }
2103 break;
2104
2105 default:
2106 /*do nothing*/
2107 break;
2108 }
2109
2110 switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
2111 {
2112 case M4VSS3GPP_kVideoTransitionType_CrossFade:
2113 /**
2114 * Apply the transition effect */
2115 err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
2116 (M4ViComImagePlane *)pC->yuv1,
2117 (M4ViComImagePlane *)pC->yuv2,
2118 (M4ViComImagePlane *)pPlaneOut, iProgress);
2119
2120 if( M4NO_ERROR != err )
2121 {
2122 M4OSA_TRACE1_1(
2123 "M4VSS3GPP_intVideoTransition:\
2124 M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
2125 returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
2126 err);
2127 return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
2128 }
2129 break;
2130
2131 case M4VSS3GPP_kVideoTransitionType_None:
2132 /**
2133 * This is a simple, non-optimized version of the None transition:
2134 * we just copy the YUV frame */
2135 if( iProgress < 500 ) /**< first half of transition */
2136 {
2137 pPlane = pC->yuv1;
2138 }
2139 else /**< second half of transition */
2140 {
2141 pPlane = pC->yuv2;
2142 }
2143 /**
2144 * Copy the input YUV frames */
2145 i = 3;
2146
2147 while( i-- > 0 )
2148 {
2149 memcpy((void *)pPlaneOut[i].pac_data,
2150 (void *)pPlane[i].pac_data,
2151 pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
2152 }
2153 break;
2154
2155 default:
2156 if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
2157 >= M4VSS3GPP_kVideoTransitionType_External )
2158 {
2159 /**
2160 * Set the progress info provided to the external function */
2161 extProgress.uiProgress = (M4OSA_UInt32)iProgress;
2162 // Decorrelate input and output encoding timestamp to handle encoder prefetch
2163 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
2164 extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;
2165
2166 err = pC->pTransitionList[pC->
2167 uiCurrentClip].ExtVideoTransitionFct(
2168 pC->pTransitionList[pC->
2169 uiCurrentClip].pExtVideoTransitionFctCtxt,
2170 pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
2171 pC->pTransitionList[pC->
2172 uiCurrentClip].VideoTransitionType
2173 - M4VSS3GPP_kVideoTransitionType_External);
2174
2175 if( M4NO_ERROR != err )
2176 {
2177 M4OSA_TRACE1_1(
2178 "M4VSS3GPP_intVideoTransition:\
2179 External video transition function returns 0x%x!",
2180 err);
2181 return err;
2182 }
2183 break;
2184 }
2185 else
2186 {
2187 M4OSA_TRACE1_1(
2188 "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
2189 returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
2190 pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
2191 return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
2192 }
2193 }
2194
2195 /**
2196 * Return */
2197 M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
2198 return M4NO_ERROR;
2199 }
2200
2201 /**
2202 ******************************************************************************
2203 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo()
2204 * @brief Update the bitstream time info with the Counter Time System, to remain
2205 * compliant with players that use the bitstream time info
2206 * @note H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops
2207 * (GOV, see the standard)
2208 * @param pC (IN/OUT) returns time updated video AU,
2209 * the offset between system and video time (MPEG4 only)
2210 * and the state of the current clip (MPEG4 only)
2211 * @return nothing
2212 ******************************************************************************
2213 */
2214 static M4OSA_Void
2215 M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
2216 M4SYS_AccessUnit *pAU )
2217 {
2218 M4OSA_UInt8 uiTmp;
2219 M4OSA_UInt32 uiCts = 0;
2220 M4OSA_MemAddr8 pTmp;
2221 M4OSA_UInt32 uiAdd;
2222 M4OSA_UInt32 uiCurrGov;
2223 M4OSA_Int8 iDiff;
2224
2225 M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1;
2226 M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset);
2227
2228 /**
2229 * Set H263 time counter from system time */
2230 if( M4SYS_kH263 == pAU->stream->streamType )
2231 {
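        /* Convert the millisecond CTS into a frame index at 30000/1001 fps
         * (~29.97 fps) and wrap it with M4VSS3GPP_EDIT_H263_MODULO_TIME,
         * presumably the range of the 8-bit H.263 temporal reference. */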
2232 uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5)
2233 % M4VSS3GPP_EDIT_H263_MODULO_TIME);
2234 M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress),
2235 uiTmp);
2236 }
2237 /*
2238 * Set MPEG4 GOV time counter regarding video and system time */
2239 else if( M4SYS_kMPEG_4 == pAU->stream->streamType )
2240 {
2241 /*
2242 * If GOV.
2243 * beware of little/big endian! */
2244 /* correction: read four 8-bit blocks instead of one 32-bit block */
2245 M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress);
2246 M4OSA_UInt32 temp32 = 0;
2247
2248 temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8))
2249 + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8)
2250 + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16)
2251 + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24);
2252
2253 M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32,
2254 *(pAU->dataAddress));
2255
2256 if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 )
2257 {
2258 pTmp =
2259 (M4OSA_MemAddr8)(pAU->dataAddress
2260 + 1); /**< Jump to the time code (just after the 32 bits header) */
2261 uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset);
2262
2263 switch( pClipCtxt->bMpeg4GovState )
2264 {
2265 case M4OSA_FALSE: /*< INIT */
2266 {
2267 /* video time = ceil (system time + offset) */
2268 uiCts = ( uiAdd + 999) / 1000;
2269
2270 /* offset update */
2271 ( *pOffset) += (( uiCts * 1000) - uiAdd);
2272
2273 /* Save values */
2274 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
2275
2276 /* State to 'first' */
2277 pClipCtxt->bMpeg4GovState = M4OSA_TRUE;
2278 }
2279 break;
2280
2281 case M4OSA_TRUE: /*< UPDATE */
2282 {
2283 /* Get current Gov value */
2284 M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov);
2285
2286 /* video time = floor or ceil (system time + offset) */
2287 uiCts = (uiAdd / 1000);
2288 iDiff = (M4OSA_Int8)(uiCurrGov
2289 - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts
2290 + pClipCtxt->uiMpeg4PrevGovValueSet);
2291
2292 /* ceiling */
2293 if( iDiff > 0 )
2294 {
2295 uiCts += (M4OSA_UInt32)(iDiff);
2296
2297 /* offset update */
2298 ( *pOffset) += (( uiCts * 1000) - uiAdd);
2299 }
2300
2301 /* Save values */
2302 pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov;
2303 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
2304 }
2305 break;
2306 }
2307
2308 M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts);
2309 }
2310 }
2311 return;
2312 }
2313
2314 /**
2315 ******************************************************************************
2316 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects()
2317 * @brief Check which video effect must be applied at the current time
2318 ******************************************************************************
2319 */
2320 static M4OSA_Void
2321 M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
2322 M4OSA_UInt8 uiClipNumber )
2323 {
2324 M4OSA_UInt8 uiClipIndex;
2325 M4OSA_UInt8 uiFxIndex, i;
2326 M4VSS3GPP_ClipContext *pClip;
2327 M4VSS3GPP_EffectSettings *pFx;
2328 M4OSA_Int32 Off, BC, EC;
2329 // Decorrelate input and output encoding timestamp to handle encoder prefetch
2330 M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;
2331
2332 uiClipIndex = pC->uiCurrentClip;
2333 if (uiClipNumber == 1) {
2334 pClip = pC->pC1;
2335 pC->bClip1ActiveFramingEffect = M4OSA_FALSE;
2336 } else {
2337 pClip = pC->pC2;
2338 pC->bClip2ActiveFramingEffect = M4OSA_FALSE;
2339 }
2340 /**
2341 * Shortcuts for code readability */
2342 Off = pClip->iVoffset;
2343 BC = pClip->iActualVideoBeginCut;
2344 EC = pClip->iEndTime;
2345
2346 i = 0;
2347
2348 for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ )
2349 {
2350 /** Shortcut: iterate in reverse order because of the priority between effects (the last effect has the highest priority) */
2351 pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]);
2352
2353 if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType )
2354 {
2355 /**
2356 * Check if there is actually a video effect */
2357
2358 if(uiClipNumber ==1)
2359 {
2360 /**< Are we after the start time of the effect?
2361 * or Are we into the effect duration?
2362 */
2363 if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
2364 (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
2365 /**
2366 * Set the active effect(s) */
2367 pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;
2368
2369 /**
2370 * Update counter of active effects */
2371 i++;
2372 if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) {
2373 pC->bClip1ActiveFramingEffect = M4OSA_TRUE;
2374 }
2375
2376 /**
2377 * For all external effects set this flag to true. */
2378 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2379 {
2380 pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2381 }
2382 }
2383
2384 }
2385 else
2386 {
2387 /**< Are we into the effect duration? */
2388 if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2389 >= (M4OSA_Int32)(pFx->uiStartTime))
2390 && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2391 <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
2392 /**
2393 * Set the active effect(s) */
2394 pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;
2395
2396 /**
2397 * Update counter of active effects */
2398 i++;
2399 if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) {
2400 pC->bClip2ActiveFramingEffect = M4OSA_TRUE;
2401 }
2402 /**
2403 * For all external effects set this flag to true. */
2404 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2405 {
2406 pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2407 }
2408
2409 /**
2410 * The third effect has the highest priority, then the second one, then the first one;
2411 * hence the effects list is scanned in reverse order. */
2412 }
2413 }
2414 if (M4VIDEOEDITING_kH264 !=
2415 pC->pC1->pSettings->ClipProperties.VideoStreamType) {
2416
2417 // For MPEG-4 and H263 clips, a full decode/encode is not required
2418 pC->m_bClipExternalHasStarted = M4OSA_FALSE;
2419 }
2420 }
2421 }
2422 if(1==uiClipNumber)
2423 {
2424 /**
2425 * Save number of active effects */
2426 pC->nbActiveEffects = i;
2427 }
2428 else
2429 {
2430 pC->nbActiveEffects1 = i;
2431 }
2432
2433 /**
2434 * Change the absolute time to clip-related time */
2435 t -= Off;
2436
2437 /**
2438 * Check if we are on the begin cut (for clip1 only) */
2439 if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) )
2440 {
2441 pC->bClip1AtBeginCut = M4OSA_TRUE;
2442 }
2443 else
2444 {
2445 pC->bClip1AtBeginCut = M4OSA_FALSE;
2446 }
2447
2448 return;
2449 }
2450
2451 /**
2452 ******************************************************************************
2453 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder()
2454 * @brief Creates the video encoder
2455 * @note
2456 ******************************************************************************
2457 */
2458 M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2459 {
2460 M4OSA_ERR err;
2461 M4ENCODER_AdvancedParams EncParams;
2462
2463 /**
2464 * Simulate a writer interface with our specific function */
2465 pC->ewc.OurWriterDataInterface.pProcessAU =
2466 M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific,
2467 but it follows the writer interface */
2468 pC->ewc.OurWriterDataInterface.pStartAU =
2469 M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific,
2470 but it follows the writer interface */
2471 pC->ewc.OurWriterDataInterface.pWriterContext =
2472 (M4WRITER_Context)
2473 pC; /**< We give the internal context as writer context */
2474
2475 /**
2476 * Get the encoder interface, if not already done */
2477 if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts )
2478 {
2479 err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI,
2480 pC->ewc.VideoStreamType);
2481 M4OSA_TRACE1_1(
2482 "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x",
2483 err);
2484 M4ERR_CHECK_RETURN(err);
2485 }
2486
2487 /**
2488 * Set encoder shell parameters according to VSS settings */
2489
2490 /* Common parameters */
2491 EncParams.InputFormat = M4ENCODER_kIYUV420;
2492 EncParams.FrameWidth = pC->ewc.uiVideoWidth;
2493 EncParams.FrameHeight = pC->ewc.uiVideoHeight;
2494 EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale;
2495
2496 if( pC->bIsMMS == M4OSA_FALSE )
2497 {
2498 /* No strict regulation in video editor */
2499 /* Because of the effects and transitions we should allow more flexibility */
2500 /* It also prevents dropping important frames (which would hurt scheduling and
2501 cause block effects) */
2502 EncParams.bInternalRegulation = M4OSA_FALSE;
2503 // Variable framerate is not supported by StageFright encoders
2504 EncParams.FrameRate = M4ENCODER_k30_FPS;
2505 }
2506 else
2507 {
2508 /* In case of MMS mode, we need to enable bitrate regulation to be sure */
2509 /* to reach the targeted output file size */
2510 EncParams.bInternalRegulation = M4OSA_TRUE;
2511 EncParams.FrameRate = pC->MMSvideoFramerate;
2512 }
2513
2514 /**
2515 * Other encoder settings (defaults) */
2516 EncParams.uiHorizontalSearchRange = 0; /* use default */
2517 EncParams.uiVerticalSearchRange = 0; /* use default */
2518 EncParams.bErrorResilience = M4OSA_FALSE; /* no error resilience */
2519 EncParams.uiIVopPeriod = 0; /* use default */
2520 EncParams.uiMotionEstimationTools = 0; /* M4V_MOTION_EST_TOOLS_ALL */
2521 EncParams.bAcPrediction = M4OSA_TRUE; /* use AC prediction */
2522 EncParams.uiStartingQuantizerValue = 10; /* initial QP = 10 */
2523 EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2524
2525 /**
2526 * Set the video profile and level */
2527 EncParams.videoProfile = pC->ewc.outputVideoProfile;
2528 EncParams.videoLevel= pC->ewc.outputVideoLevel;
2529
2530 switch ( pC->ewc.VideoStreamType )
2531 {
2532 case M4SYS_kH263:
2533
2534 EncParams.Format = M4ENCODER_kH263;
2535
2536 EncParams.uiStartingQuantizerValue = 10;
2537 EncParams.uiRateFactor = 1; /* default */
2538
2539 EncParams.bErrorResilience = M4OSA_FALSE;
2540 EncParams.bDataPartitioning = M4OSA_FALSE;
2541 break;
2542
2543 case M4SYS_kMPEG_4:
2544
2545 EncParams.Format = M4ENCODER_kMPEG4;
2546
2547 EncParams.uiStartingQuantizerValue = 8;
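            /* uiRateFactor below is the output frame duration converted from
             * milliseconds to timescale ticks (rounded to the nearest integer);
             * this assumes the encoder expects a ticks-per-frame value here. */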
2548 EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration
2549 * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5);
2550
2551 if( EncParams.uiRateFactor == 0 )
2552 EncParams.uiRateFactor = 1; /* default */
2553
2554 if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning )
2555 {
2556 EncParams.bErrorResilience = M4OSA_FALSE;
2557 EncParams.bDataPartitioning = M4OSA_FALSE;
2558 }
2559 else
2560 {
2561 EncParams.bErrorResilience = M4OSA_TRUE;
2562 EncParams.bDataPartitioning = M4OSA_TRUE;
2563 }
2564 break;
2565
2566 case M4SYS_kH264:
2567 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264");
2568
2569 EncParams.Format = M4ENCODER_kH264;
2570
2571 EncParams.uiStartingQuantizerValue = 10;
2572 EncParams.uiRateFactor = 1; /* default */
2573
2574 EncParams.bErrorResilience = M4OSA_FALSE;
2575 EncParams.bDataPartitioning = M4OSA_FALSE;
2576 //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS;
2577 break;
2578
2579 default:
2580 M4OSA_TRACE1_1(
2581 "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x",
2582 pC->ewc.VideoStreamType);
2583 return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT;
2584 }
2585
2586 if( pC->bIsMMS == M4OSA_FALSE )
2587 {
2588 EncParams.Bitrate = pC->xVSS.outputVideoBitrate;
2589
2590 }
2591 else
2592 {
2593 EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */
2594 EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */
2595 }
2596
2597 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit");
2598 /**
2599 * Init the video encoder (advanced settings version of the encoder Open function) */
2600 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext,
2601 &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC,
2602 pC->ShellAPI.pCurrentVideoEncoderExternalAPI,
2603 pC->ShellAPI.pCurrentVideoEncoderUserData);
2604
2605 if( M4NO_ERROR != err )
2606 {
2607 M4OSA_TRACE1_1(
2608 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x",
2609 err);
2610 return err;
2611 }
2612
2613 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2614 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen");
2615
2616 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext,
2617 &pC->ewc.WriterVideoAU, &EncParams);
2618
2619 if( M4NO_ERROR != err )
2620 {
2621 M4OSA_TRACE1_1(
2622 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x",
2623 err);
2624 return err;
2625 }
2626
2627 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2628 M4OSA_TRACE1_0(
2629 "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart");
2630
2631 if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart )
2632 {
2633 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart(
2634 pC->ewc.pEncContext);
2635
2636 if( M4NO_ERROR != err )
2637 {
2638 M4OSA_TRACE1_1(
2639 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x",
2640 err);
2641 return err;
2642 }
2643 }
2644
2645 pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning;
2646
2647 /**
2648 * Return */
2649 M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR");
2650 return M4NO_ERROR;
2651 }
2652
2653 /**
2654 ******************************************************************************
2655 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder()
2656 * @brief Destroy the video encoder
2657 * @note
2658 ******************************************************************************
2659 */
2660 M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2661 {
2662 M4OSA_ERR err = M4NO_ERROR;
2663
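    /* Tear down in the reverse order of creation: stop the encoder if it is
     * running, close it if it was opened, then clean up the context. Errors
     * are traced but the teardown continues so every stage gets a chance to
     * run. */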
2664 if( M4OSA_NULL != pC->ewc.pEncContext )
2665 {
2666 if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState )
2667 {
2668 if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL )
2669 {
2670 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop(
2671 pC->ewc.pEncContext);
2672
2673 if( M4NO_ERROR != err )
2674 {
2675 M4OSA_TRACE1_1(
2676 "M4VSS3GPP_intDestroyVideoEncoder:\
2677 pVideoEncoderGlobalFcts->pFctStop returns 0x%x",
2678 err);
2679 /* Well... how the heck do you handle a failed cleanup? */
2680 }
2681 }
2682
2683 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2684 }
2685
2686 /* Has the encoder actually been opened? Don't close it if that's not the case. */
2687 if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState )
2688 {
2689 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose(
2690 pC->ewc.pEncContext);
2691
2692 if( M4NO_ERROR != err )
2693 {
2694 M4OSA_TRACE1_1(
2695 "M4VSS3GPP_intDestroyVideoEncoder:\
2696 pVideoEncoderGlobalFcts->pFctClose returns 0x%x",
2697 err);
2698 /* Well... how the heck do you handle a failed cleanup? */
2699 }
2700
2701 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2702 }
2703
2704 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup(
2705 pC->ewc.pEncContext);
2706
2707 if( M4NO_ERROR != err )
2708 {
2709 M4OSA_TRACE1_1(
2710 "M4VSS3GPP_intDestroyVideoEncoder:\
2711 pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!",
2712 err);
2713 /**< We do not return the error here because we still have stuff to free */
2714 }
2715
2716 pC->ewc.encoderState = M4VSS3GPP_kNoEncoder;
2717 /**
2718 * Reset variable */
2719 pC->ewc.pEncContext = M4OSA_NULL;
2720 }
2721
2722 M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err);
2723 return err;
2724 }
2725
2726 /**
2727 ******************************************************************************
2728 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter()
2729 * @brief Modify the time counter of the given H263 video AU
2730 * @note
2731 * @param pAuDataBuffer (IN/OUT) H263 Video AU to modify
2732 * @param uiCts (IN) New time counter value
2733 * @return nothing
2734 ******************************************************************************
2735 */
2736 static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
2737 M4OSA_UInt8 uiCts )
2738 {
2739 /*
2740 * The H263 time counter is 8 bits located on the "x" below:
2741 *
2742 * |--------|--------|--------|--------|
2743 * ???????? ???????? ??????xx xxxxxx??
2744 */
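    /* For example, uiCts = 0xB5 (1011 0101b) sets the two low bits of byte 2
     * to 10b and the six high bits of byte 3 to 110101b. */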
2745
2746 /**
2747 * Write the 2 bits on the third byte */
2748 pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3);
2749
2750 /**
2751 * Write the 6 bits on the fourth byte */
2752 pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3);
2753
2754 return;
2755 }
2756
2757 /**
2758 ******************************************************************************
2759 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov()
2760 * @brief Modify the time info from Group Of VOP video AU
2761 * @note
2762 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify
2763 * @param uiCtsSec (IN) New GOV time info in second unit
2764 * @return nothing
2765 ******************************************************************************
2766 */
2767 static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2768 M4OSA_UInt32 uiCtsSec )
2769 {
2770 /*
2771 * The MPEG-4 time code length is 18 bits:
2772 *
2773 * hh mm marker ss
2774 * xxxxx|xxx xxx 1 xxxx xx ??????
2775 * |----- ---|--- - ----|-- ------|
2776 */
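    /* Worked example: uiCtsSec = 3725 (1 h 2 min 5 s) gives hh = 1, mm = 2,
     * ss = 5, i.e. byte 0 = 0x08, byte 1 = 0x51 and the two MSBs of byte 2
     * set to 01b. */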
2777 M4OSA_UInt8 uiHh;
2778 M4OSA_UInt8 uiMm;
2779 M4OSA_UInt8 uiSs;
2780 M4OSA_UInt8 uiTmp;
2781
2782 /**
2783 * Write the 2 last bits ss */
2784 uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */
2785 pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F));
2786
2787 if( uiCtsSec < 60 )
2788 {
2789 /**
2790 * Write the 3 last bits of mm (zero here), the marker bit (0x10) and the high bits of ss */
2791 pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2));
2792
2793 /**
2794 * Write the 5 bits of hh and 3 of mm (out of 6) */
2795 pAuDataBuffer[0] = 0;
2796 }
2797 else
2798 {
2799 /**
2800 * Write the 3 last bits of mm, the marker bit (0x10) and the high bits of ss */
2801 uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */
2802 uiMm = (M4OSA_UInt8)(uiTmp % 60);
2803 pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2));
2804
2805 if( uiTmp < 60 )
2806 {
2807 /**
2808 * Write the 5 bits of hh and 3 of mm (out of 6) */
2809 pAuDataBuffer[0] = ((uiMm >> 3));
2810 }
2811 else
2812 {
2813 /**
2814 * Write the 5 bits of hh and 3 of mm (out of 6) */
2815 uiHh = (M4OSA_UInt8)(uiTmp / 60);
2816 pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3));
2817 }
2818 }
2819 return;
2820 }
2821
2822 /**
2823 ******************************************************************************
2824 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov()
2825 * @brief Get the time info from Group Of VOP video AU
2826 * @note
2827 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify
2828 * @param pCtsSec (OUT) Current GOV time info in second unit
2829 * @return nothing
2830 ******************************************************************************
2831 */
2832 static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2833 M4OSA_UInt32 *pCtsSec )
2834 {
2835 /*
2836 * The MPEG-4 time code length is 18 bits:
2837 *
2838 * hh mm marker ss
2839 * xxxxx|xxx xxx 1 xxxx xx ??????
2840 * |----- ---|--- - ----|-- ------|
2841 */
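    /* This is the inverse of M4VSS3GPP_intSetMPEG4Gov: ss, mm and hh are
     * reassembled from the three bytes and combined as hh*3600 + mm*60 + ss. */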
2842 M4OSA_UInt8 uiHh;
2843 M4OSA_UInt8 uiMm;
2844 M4OSA_UInt8 uiSs;
2845 M4OSA_UInt8 uiTmp;
2846 M4OSA_UInt32 uiCtsSec;
2847
2848 /**
2849 * Read ss */
2850 uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6);
2851 uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2);
2852 uiCtsSec = uiSs + uiTmp;
2853
2854 /**
2855 * Read mm */
2856 uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5);
2857 uiTmp = (( pAuDataBuffer[0] & 0x07) << 3);
2858 uiMm = uiMm + uiTmp;
2859 uiCtsSec = ( uiMm * 60) + uiCtsSec;
2860
2861 /**
2862 * Read hh */
2863 uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3);
2864
2865 if( uiHh )
2866 {
2867 uiCtsSec = ( uiHh * 3600) + uiCtsSec;
2868 }
2869
2870 /*
2871 * in sec */
2872 *pCtsSec = uiCtsSec;
2873
2874 return;
2875 }
2876
2877 /**
2878 ******************************************************************************
2879 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420()
2880 * @brief Allocate the three YUV 4:2:0 planes
2881 * @note
2882 * @param pPlanes (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures
2883 * @param uiWidth (IN) Image width
2884 * @param uiHeight(IN) Image height
2885 ******************************************************************************
2886 */
2887 static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
2888 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight )
2889 {
2890 if (pPlanes == M4OSA_NULL) {
2891 M4OSA_TRACE1_0("M4VSS3GPP_intAllocateYUV420: Invalid pPlanes pointer");
2892 return M4ERR_PARAMETER;
2893 }
2894 /* If the buffer is not NULL and already has the target size,
2895 * do not malloc again */
2896 if (pPlanes[0].pac_data != M4OSA_NULL &&
2897 pPlanes[0].u_width == uiWidth &&
2898 pPlanes[0].u_height == uiHeight) {
2899 return M4NO_ERROR;
2900 }
2901
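    /* YUV 4:2:0 planar layout: the Y plane is uiWidth x uiHeight and each
     * chroma plane is (uiWidth/2) x (uiHeight/2), so the three allocations
     * below total 1.5 * uiWidth * uiHeight bytes. */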
2902 pPlanes[0].u_width = uiWidth;
2903 pPlanes[0].u_height = uiHeight;
2904 pPlanes[0].u_stride = uiWidth;
2905 pPlanes[0].u_topleft = 0;
2906
2907 if (pPlanes[0].pac_data != M4OSA_NULL) {
2908 free(pPlanes[0].pac_data);
2909 pPlanes[0].pac_data = M4OSA_NULL;
2910 }
2911 pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride
2912 * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data");
2913
2914 if( M4OSA_NULL == pPlanes[0].pac_data )
2915 {
2916 M4OSA_TRACE1_0(
2917 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\
2918 returning M4ERR_ALLOC");
2919 return M4ERR_ALLOC;
2920 }
2921
2922 pPlanes[1].u_width = pPlanes[0].u_width >> 1;
2923 pPlanes[1].u_height = pPlanes[0].u_height >> 1;
2924 pPlanes[1].u_stride = pPlanes[1].u_width;
2925 pPlanes[1].u_topleft = 0;
2926 if (pPlanes[1].pac_data != M4OSA_NULL) {
2927 free(pPlanes[1].pac_data);
2928 pPlanes[1].pac_data = M4OSA_NULL;
2929 }
2930 pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride
2931 * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data");
2932
2933 if( M4OSA_NULL == pPlanes[1].pac_data )
2934 {
2935 M4OSA_TRACE1_0(
2936 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\
2937 returning M4ERR_ALLOC");
2938 free((void *)pPlanes[0].pac_data);
2939 pPlanes[0].pac_data = M4OSA_NULL;
2940 return M4ERR_ALLOC;
2941 }
2942
2943 pPlanes[2].u_width = pPlanes[1].u_width;
2944 pPlanes[2].u_height = pPlanes[1].u_height;
2945 pPlanes[2].u_stride = pPlanes[2].u_width;
2946 pPlanes[2].u_topleft = 0;
2947 if (pPlanes[2].pac_data != M4OSA_NULL) {
2948 free(pPlanes[2].pac_data);
2949 pPlanes[2].pac_data = M4OSA_NULL;
2950 }
2951 pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride
2952 * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data");
2953
2954 if( M4OSA_NULL == pPlanes[2].pac_data )
2955 {
2956 M4OSA_TRACE1_0(
2957 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\
2958 returning M4ERR_ALLOC");
2959 free((void *)pPlanes[0].pac_data);
2960 free((void *)pPlanes[1].pac_data);
2961 pPlanes[0].pac_data = M4OSA_NULL;
2962 pPlanes[1].pac_data = M4OSA_NULL;
2963 return M4ERR_ALLOC;
2964 }
2965
2966 memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height);
2967 memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height);
2968 memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height);
2969 /**
2970 * Return */
2971 M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR");
2972 return M4NO_ERROR;
2973 }
2974
2975 /**
2976 ******************************************************************************
2977 * M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
2978 * M4OSA_FileReadPointer* pFileReadPtr,
2979 * M4VIFI_ImagePlane* pImagePlanes,
2980 * M4OSA_UInt32 width,
2981 * M4OSA_UInt32 height);
2982 * @brief Converts and resizes an ARGB8888 image to YUV420
2983 * @note
2984 * @param pFileIn (IN) The ARGB8888 input file
2985 * @param pFileReadPtr (IN) Pointer on filesystem functions
2986 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user.
2987 * ARGB8888 image will be converted and resized to output
2988 * YUV420 plane size
2989 * @param width (IN) width of the ARGB8888
2990 * @param height (IN) height of the ARGB8888
2991 * @return M4NO_ERROR: No error
2992 * @return M4ERR_ALLOC: memory error
2993 * @return M4ERR_PARAMETER: At least one of the function parameters is null
2994 ******************************************************************************
2995 */
2996
2997 M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
2998 M4OSA_FileReadPointer* pFileReadPtr,
2999 M4VIFI_ImagePlane* pImagePlanes,
3000 M4OSA_UInt32 width,M4OSA_UInt32 height) {
3001 M4OSA_Context pARGBIn;
3002 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
3003 M4OSA_UInt32 frameSize_argb = width * height * 4;
3004 M4OSA_UInt32 frameSize_rgb888 = width * height * 3;
3005 M4OSA_UInt32 i = 0,j= 0;
3006 M4OSA_ERR err = M4NO_ERROR;
3007
3008 M4OSA_UInt8 *pArgbPlane =
3009 (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
3010 M4VS, (M4OSA_Char*)"argb data");
3011 if (pArgbPlane == M4OSA_NULL) {
3012 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \
3013 Failed to allocate memory for ARGB plane");
3014 return M4ERR_ALLOC;
3015 }
3016
3017 /* Get file size */
3018 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
3019 if (err != M4NO_ERROR) {
3020 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \
3021 Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
3022 free(pArgbPlane);
3023 pArgbPlane = M4OSA_NULL;
3024 goto cleanup;
3025 }
3026
3027 err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane,
3028 &frameSize_argb);
3029 if (err != M4NO_ERROR) {
3030 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
3031 Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
3032 pFileReadPtr->closeRead(pARGBIn);
3033 free(pArgbPlane);
3034 pArgbPlane = M4OSA_NULL;
3035 goto cleanup;
3036 }
3037
3038 err = pFileReadPtr->closeRead(pARGBIn);
3039 if(err != M4NO_ERROR) {
3040 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
3041 Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
3042 free(pArgbPlane);
3043 pArgbPlane = M4OSA_NULL;
3044 goto cleanup;
3045 }
3046
3047 rgbPlane1.pac_data =
3048 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888,
3049 M4VS, (M4OSA_Char*)"RGB888 plane1");
3050 if(rgbPlane1.pac_data == M4OSA_NULL) {
3051 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
3052 Failed to allocate memory for rgb plane1");
3053 free(pArgbPlane);
3054 return M4ERR_ALLOC;
3055 }
3056
3057 rgbPlane1.u_height = height;
3058 rgbPlane1.u_width = width;
3059 rgbPlane1.u_stride = width*3;
3060 rgbPlane1.u_topleft = 0;
3061
3062
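    /* Each ARGB8888 pixel is 4 bytes with the alpha byte stored first (as this
     * conversion assumes), so skipping every byte where (i % 4) == 0 leaves a
     * packed RGB888 buffer. */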
3063 /** Remove the alpha channel */
3064 for (i=0, j = 0; i < frameSize_argb; i++) {
3065 if ((i % 4) == 0) continue;
3066 rgbPlane1.pac_data[j] = pArgbPlane[i];
3067 j++;
3068 }
3069 free(pArgbPlane);
3070
3071 /**
3072 * Check if resizing is required with color conversion */
3073 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) {
3074
3075 frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3;
3076 rgbPlane2.pac_data =
3077 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS,
3078 (M4OSA_Char*)"rgb Plane2");
3079 if(rgbPlane2.pac_data == M4OSA_NULL) {
3080 M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2");
3081 free(rgbPlane1.pac_data);
3082 return M4ERR_ALLOC;
3083 }
3084 rgbPlane2.u_height = pImagePlanes->u_height;
3085 rgbPlane2.u_width = pImagePlanes->u_width;
3086 rgbPlane2.u_stride = pImagePlanes->u_width*3;
3087 rgbPlane2.u_topleft = 0;
3088
3089 /* Resizing */
3090 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL,
3091 &rgbPlane1, &rgbPlane2);
3092 free(rgbPlane1.pac_data);
3093 if(err != M4NO_ERROR) {
3094 M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err);
3095 free(rgbPlane2.pac_data);
3096 return err;
3097 }
3098
3099 /*Converting Resized RGB888 to YUV420 */
3100 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
3101 free(rgbPlane2.pac_data);
3102 if(err != M4NO_ERROR) {
3103 M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err);
3104 return err;
3105 }
3106 } else {
3107 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
3108 if(err != M4NO_ERROR) {
3109 M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
3110 }
3111 free(rgbPlane1.pac_data);
3112 }
3113 cleanup:
3114 M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit");
3115 return err;
3116 }
3117
3118 M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC,
3119 M4xVSS_MediaRendering renderingMode,
3120 M4VIFI_ImagePlane* pInplane,
3121 M4VIFI_ImagePlane* pOutplane) {
3122
3123 M4OSA_ERR err = M4NO_ERROR;
3124 M4AIR_Params airParams;
3125 M4VIFI_ImagePlane pImagePlanesTemp[3];
3126 M4OSA_UInt32 i = 0;
3127
3128 if (renderingMode == M4xVSS_kBlackBorders) {
3129 memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE,
3130 (pOutplane[0].u_height*pOutplane[0].u_stride));
3131 memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE,
3132 (pOutplane[1].u_height*pOutplane[1].u_stride));
3133 memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE,
3134 (pOutplane[2].u_height*pOutplane[2].u_stride));
3135 }
3136
3137 if (renderingMode == M4xVSS_kResizing) {
3138 /**
3139 * Call the resize filter.
3140 * From the intermediate frame to the encoder image plane */
3141 err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL,
3142 pInplane, pOutplane);
3143 if (M4NO_ERROR != err) {
3144 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3145 M4VIFI_ResizeBilinearYUV420toYUV420 returns 0x%x!", err);
3146 return err;
3147 }
3148 } else {
3149 M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL;
3150 M4OSA_UInt8* pOutPlaneY =
3151 pOutplane[0].pac_data + pOutplane[0].u_topleft;
3152 M4OSA_UInt8* pOutPlaneU =
3153 pOutplane[1].pac_data + pOutplane[1].u_topleft;
3154 M4OSA_UInt8* pOutPlaneV =
3155 pOutplane[2].pac_data + pOutplane[2].u_topleft;
3156 M4OSA_UInt8* pInPlaneY = M4OSA_NULL;
3157 M4OSA_UInt8* pInPlaneU = M4OSA_NULL;
3158 M4OSA_UInt8* pInPlaneV = M4OSA_NULL;
3159
3160 /* To keep media aspect ratio*/
3161 /* Initialize AIR Params*/
3162 airParams.m_inputCoord.m_x = 0;
3163 airParams.m_inputCoord.m_y = 0;
3164 airParams.m_inputSize.m_height = pInplane->u_height;
3165 airParams.m_inputSize.m_width = pInplane->u_width;
3166 airParams.m_outputSize.m_width = pOutplane->u_width;
3167 airParams.m_outputSize.m_height = pOutplane->u_height;
3168 airParams.m_bOutputStripe = M4OSA_FALSE;
3169 airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft;
3170
3171 /**
3172 Media rendering: Black borders*/
3173 if (renderingMode == M4xVSS_kBlackBorders) {
3174 pImagePlanesTemp[0].u_width = pOutplane[0].u_width;
3175 pImagePlanesTemp[0].u_height = pOutplane[0].u_height;
3176 pImagePlanesTemp[0].u_stride = pOutplane[0].u_width;
3177 pImagePlanesTemp[0].u_topleft = 0;
3178
3179 pImagePlanesTemp[1].u_width = pOutplane[1].u_width;
3180 pImagePlanesTemp[1].u_height = pOutplane[1].u_height;
3181 pImagePlanesTemp[1].u_stride = pOutplane[1].u_width;
3182 pImagePlanesTemp[1].u_topleft = 0;
3183
3184 pImagePlanesTemp[2].u_width = pOutplane[2].u_width;
3185 pImagePlanesTemp[2].u_height = pOutplane[2].u_height;
3186 pImagePlanesTemp[2].u_stride = pOutplane[2].u_width;
3187 pImagePlanesTemp[2].u_topleft = 0;
3188
3189 /**
3190 * Allocate the planes in the local image plane structure */
3191 pImagePlanesTemp[0].pac_data =
3192 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3193 pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height,
3194 M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ;
3195 if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) {
3196 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3197 return M4ERR_ALLOC;
3198 }
3199 pImagePlanesTemp[1].pac_data =
3200 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3201 pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height,
3202 M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ;
3203 if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) {
3204 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3205 free(pImagePlanesTemp[0].pac_data);
3206 return M4ERR_ALLOC;
3207 }
3208 pImagePlanesTemp[2].pac_data =
3209 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3210 pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height,
3211 M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ;
3212 if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) {
3213 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3214 free(pImagePlanesTemp[0].pac_data);
3215 free(pImagePlanesTemp[1].pac_data);
3216 return M4ERR_ALLOC;
3217 }
3218
3219 pInPlaneY = pImagePlanesTemp[0].pac_data ;
3220 pInPlaneU = pImagePlanesTemp[1].pac_data ;
3221 pInPlaneV = pImagePlanesTemp[2].pac_data ;
3222
3223 memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE,
3224 (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));
3225 memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE,
3226 (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));
3227 memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE,
3228 (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));
3229
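            /* Decide between letterbox and pillarbox: scale the input height by
             * the output/input width ratio; if the result fits in the output
             * plane the borders go on top and bottom, otherwise the width is
             * scaled instead and the borders go on the left and right. */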
3230 M4OSA_UInt32 height =
3231 (pInplane->u_height * pOutplane->u_width) /pInplane->u_width;
3232
3233 if (height <= pOutplane->u_height) {
3234 /**
3235 * Black borders will be on the top and the bottom side */
3236 airParams.m_outputSize.m_width = pOutplane->u_width;
3237 airParams.m_outputSize.m_height = height;
3238 /**
3239 * Number of lines at the top */
3240 pImagePlanesTemp[0].u_topleft =
3241 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height -
3242 airParams.m_outputSize.m_height)>>1)) *
3243 pImagePlanesTemp[0].u_stride;
3244 pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height;
3245 pImagePlanesTemp[1].u_topleft =
3246 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height -
3247 (airParams.m_outputSize.m_height>>1)))>>1) *
3248 pImagePlanesTemp[1].u_stride;
3249 pImagePlanesTemp[1].u_height =
3250 airParams.m_outputSize.m_height>>1;
3251 pImagePlanesTemp[2].u_topleft =
3252 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height -
3253 (airParams.m_outputSize.m_height>>1)))>>1) *
3254 pImagePlanesTemp[2].u_stride;
3255 pImagePlanesTemp[2].u_height =
3256 airParams.m_outputSize.m_height>>1;
3257 } else {
3258 /**
3259 * Black borders will be on the left and right side */
3260 airParams.m_outputSize.m_height = pOutplane->u_height;
3261 airParams.m_outputSize.m_width =
3262 (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height);
3263
3264 pImagePlanesTemp[0].u_topleft =
3265 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width -
3266 airParams.m_outputSize.m_width)>>1));
3267 pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width;
3268 pImagePlanesTemp[1].u_topleft =
3269 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width -
3270 (airParams.m_outputSize.m_width>>1)))>>1);
3271 pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1;
3272 pImagePlanesTemp[2].u_topleft =
3273 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width -
3274 (airParams.m_outputSize.m_width>>1)))>>1);
3275 pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1;
3276 }
3277
3278 /**
3279 * Width and height have to be even */
3280 airParams.m_outputSize.m_width =
3281 (airParams.m_outputSize.m_width>>1)<<1;
3282 airParams.m_outputSize.m_height =
3283 (airParams.m_outputSize.m_height>>1)<<1;
3284 airParams.m_inputSize.m_width =
3285 (airParams.m_inputSize.m_width>>1)<<1;
3286 airParams.m_inputSize.m_height =
3287 (airParams.m_inputSize.m_height>>1)<<1;
3288 pImagePlanesTemp[0].u_width =
3289 (pImagePlanesTemp[0].u_width>>1)<<1;
3290 pImagePlanesTemp[1].u_width =
3291 (pImagePlanesTemp[1].u_width>>1)<<1;
3292 pImagePlanesTemp[2].u_width =
3293 (pImagePlanesTemp[2].u_width>>1)<<1;
3294 pImagePlanesTemp[0].u_height =
3295 (pImagePlanesTemp[0].u_height>>1)<<1;
3296 pImagePlanesTemp[1].u_height =
3297 (pImagePlanesTemp[1].u_height>>1)<<1;
3298 pImagePlanesTemp[2].u_height =
3299 (pImagePlanesTemp[2].u_height>>1)<<1;
3300
3301 /**
3302 * Check that values are coherent */
3303 if (airParams.m_inputSize.m_height ==
3304 airParams.m_outputSize.m_height) {
3305 airParams.m_inputSize.m_width =
3306 airParams.m_outputSize.m_width;
3307 } else if (airParams.m_inputSize.m_width ==
3308 airParams.m_outputSize.m_width) {
3309 airParams.m_inputSize.m_height =
3310 airParams.m_outputSize.m_height;
3311 }
3312 pPlaneTemp = pImagePlanesTemp;
3313 }
3314
3315 /**
3316 * Media rendering: Cropping*/
3317 if (renderingMode == M4xVSS_kCropping) {
3318 airParams.m_outputSize.m_height = pOutplane->u_height;
3319 airParams.m_outputSize.m_width = pOutplane->u_width;
3320 if ((airParams.m_outputSize.m_height *
3321 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width <
3322 airParams.m_inputSize.m_height) {
3323 /* Height will be cropped */
3324 airParams.m_inputSize.m_height =
3325 (M4OSA_UInt32)((airParams.m_outputSize.m_height *
3326 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width);
3327 airParams.m_inputSize.m_height =
3328 (airParams.m_inputSize.m_height>>1)<<1;
3329 airParams.m_inputCoord.m_y =
3330 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height -
3331 airParams.m_inputSize.m_height))>>1);
3332 } else {
3333 /* Width will be cropped */
3334 airParams.m_inputSize.m_width =
3335 (M4OSA_UInt32)((airParams.m_outputSize.m_width *
3336 airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height);
3337 airParams.m_inputSize.m_width =
3338 (airParams.m_inputSize.m_width>>1)<<1;
3339 airParams.m_inputCoord.m_x =
3340 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width -
3341 airParams.m_inputSize.m_width))>>1);
3342 }
3343 pPlaneTemp = pOutplane;
3344 }
3345 /**
3346 * Call AIR functions */
3347 if (M4OSA_NULL == pC->m_air_context) {
3348 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
3349 if(err != M4NO_ERROR) {
3350 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3351 M4AIR_create returned error 0x%x", err);
3352 goto cleanUp;
3353 }
3354 }
3355
3356 err = M4AIR_configure(pC->m_air_context, &airParams);
3357 if (err != M4NO_ERROR) {
3358 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3359 Error when configuring AIR: 0x%x", err);
3360 M4AIR_cleanUp(pC->m_air_context);
3361 goto cleanUp;
3362 }
3363
3364 err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp);
3365 if (err != M4NO_ERROR) {
3366 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3367 Error when getting AIR plane: 0x%x", err);
3368 M4AIR_cleanUp(pC->m_air_context);
3369 goto cleanUp;
3370 }
3371
3372 if (renderingMode == M4xVSS_kBlackBorders) {
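        /* Copy the temporary planes, which hold the scaled picture inside the
         * border frame, into the output planes row by row, honoring the output
         * stride. */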
3373 for (i=0; i<pOutplane[0].u_height; i++) {
3374 memcpy((void *)pOutPlaneY, (void *)pInPlaneY,
3375 pOutplane[0].u_width);
3376 pInPlaneY += pOutplane[0].u_width;
3377 pOutPlaneY += pOutplane[0].u_stride;
3378 }
3379 for (i=0; i<pOutplane[1].u_height; i++) {
3380 memcpy((void *)pOutPlaneU, (void *)pInPlaneU,
3381 pOutplane[1].u_width);
3382 pInPlaneU += pOutplane[1].u_width;
3383 pOutPlaneU += pOutplane[1].u_stride;
3384 }
3385 for (i=0; i<pOutplane[2].u_height; i++) {
3386 memcpy((void *)pOutPlaneV, (void *)pInPlaneV,
3387 pOutplane[2].u_width);
3388 pInPlaneV += pOutplane[2].u_width;
3389 pOutPlaneV += pOutplane[2].u_stride;
3390 }
3391 }
3392 }
3393 cleanUp:
3394 if (renderingMode == M4xVSS_kBlackBorders) {
3395 for (i=0; i<3; i++) {
3396 if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) {
3397 free(pImagePlanesTemp[i].pac_data);
3398 pImagePlanesTemp[i].pac_data = M4OSA_NULL;
3399 }
3400 }
3401 }
3402 return err;
3403 }
3404
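/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888()
 * @brief    Convert an ARGB8888 still-picture clip into YUV420 planes and
 *           register them with the video decoder shell via setOption.
 * @note     Allocates pPlaneYuv (at the still-picture size) and
 *           pPlaneYuvWithEffect (at the output video size) as single
 *           contiguous YUV420 buffers.
 ******************************************************************************
 */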
3405 M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 (
3406 M4VSS3GPP_InternalEditContext *pC,
3407 M4VSS3GPP_ClipContext* pClipCtxt) {
3408
3409 M4OSA_ERR err= M4NO_ERROR;
3410
3411     // Allocate the three YUV plane descriptors
3412 pClipCtxt->pPlaneYuv =
3413 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
3414 3*sizeof(M4VIFI_ImagePlane), M4VS,
3415 (M4OSA_Char*)"pPlaneYuv");
3416
3417 if (pClipCtxt->pPlaneYuv == M4OSA_NULL) {
3418 return M4ERR_ALLOC;
3419 }
3420
3421 pClipCtxt->pPlaneYuv[0].u_height =
3422 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight;
3423 pClipCtxt->pPlaneYuv[0].u_width =
3424 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth;
3425 pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width;
3426 pClipCtxt->pPlaneYuv[0].u_topleft = 0;
3427
3428 pClipCtxt->pPlaneYuv[0].pac_data =
3429 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
3430         (pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 3) / 2,
3431 M4VS, (M4OSA_Char*)"imageClip YUV data");
3432 if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) {
3433 free(pClipCtxt->pPlaneYuv);
3434 return M4ERR_ALLOC;
3435 }
3436
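    /* The three planes share one contiguous buffer: W*H luma bytes followed by
     * two (W/2)*(H/2) chroma planes, i.e. W*H*3/2 bytes in total (e.g. a
     * 640x480 picture needs 460800 bytes). */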
3437 pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
3438 pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
3439 pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width;
3440 pClipCtxt->pPlaneYuv[1].u_topleft = 0;
3441 pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)(
3442 pClipCtxt->pPlaneYuv[0].pac_data +
3443 pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width);
3444
3445 pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
3446 pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
3447 pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width;
3448 pClipCtxt->pPlaneYuv[2].u_topleft = 0;
3449 pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)(
3450 pClipCtxt->pPlaneYuv[1].pac_data +
3451 pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width);
3452
3453 err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 (
3454 pClipCtxt->pSettings->pFile,
3455 pC->pOsaFileReadPtr,
3456 pClipCtxt->pPlaneYuv,
3457 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth,
3458 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight);
3459 if (M4NO_ERROR != err) {
3460 free(pClipCtxt->pPlaneYuv[0].pac_data);
3461 free(pClipCtxt->pPlaneYuv);
3462 return err;
3463 }
3464
3465     // Pass the converted YUV data to the decoder shell via setOption
3466 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption (
3467 pClipCtxt->pViDecCtxt,
3468 M4DECODER_kOptionID_DecYuvData,
3469 (M4OSA_DataOption)pClipCtxt->pPlaneYuv);
3470 if (M4NO_ERROR != err) {
3471 free(pClipCtxt->pPlaneYuv[0].pac_data);
3472 free(pClipCtxt->pPlaneYuv);
3473 return err;
3474 }
3475
3476 pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE;
3477
3478     // Allocate the YUV planes that will hold the frame after effects are applied
3479 pClipCtxt->pPlaneYuvWithEffect =
3480 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
3481 3*sizeof(M4VIFI_ImagePlane), M4VS,
3482 (M4OSA_Char*)"pPlaneYuvWithEffect");
3483 if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) {
3484 free(pClipCtxt->pPlaneYuv[0].pac_data);
3485 free(pClipCtxt->pPlaneYuv);
3486 return M4ERR_ALLOC;
3487 }
3488
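    /* The "with effect" planes are sized to the final output video resolution
     * (pC->ewc) rather than to the still-picture size, so frames rendered into
     * them already match the output size. */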
3489 pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight;
3490 pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth;
3491 pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth;
3492 pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0;
3493
3494 pClipCtxt->pPlaneYuvWithEffect[0].pac_data =
3495 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
3496             (pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 3) / 2,
3497 M4VS, (M4OSA_Char*)"imageClip YUV data");
3498 if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) {
3499 free(pClipCtxt->pPlaneYuv[0].pac_data);
3500 free(pClipCtxt->pPlaneYuv);
3501 free(pClipCtxt->pPlaneYuvWithEffect);
3502 return M4ERR_ALLOC;
3503 }
3504
3505 pClipCtxt->pPlaneYuvWithEffect[1].u_height =
3506 pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
3507 pClipCtxt->pPlaneYuvWithEffect[1].u_width =
3508 pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
3509 pClipCtxt->pPlaneYuvWithEffect[1].u_stride =
3510 pClipCtxt->pPlaneYuvWithEffect[1].u_width;
3511 pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0;
3512 pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)(
3513 pClipCtxt->pPlaneYuvWithEffect[0].pac_data +
3514 pClipCtxt->pPlaneYuvWithEffect[0].u_height * pClipCtxt->pPlaneYuvWithEffect[0].u_width);
3515
3516 pClipCtxt->pPlaneYuvWithEffect[2].u_height =
3517 pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
3518 pClipCtxt->pPlaneYuvWithEffect[2].u_width =
3519 pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
3520 pClipCtxt->pPlaneYuvWithEffect[2].u_stride =
3521 pClipCtxt->pPlaneYuvWithEffect[2].u_width;
3522 pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0;
3523 pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)(
3524 pClipCtxt->pPlaneYuvWithEffect[1].pac_data +
3525 pClipCtxt->pPlaneYuvWithEffect[1].u_height * pClipCtxt->pPlaneYuvWithEffect[1].u_width);
3526
3527 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3528 pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous,
3529 (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect);
3530 if (M4NO_ERROR != err) {
3531 free(pClipCtxt->pPlaneYuv[0].pac_data);
3532 free(pClipCtxt->pPlaneYuv);
3533 free(pClipCtxt->pPlaneYuvWithEffect);
3534 return err;
3535 }
3536
3537 return M4NO_ERROR;
3538 }
3539
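/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect()
 * @brief    Render the frame at time ts for clip 1 or clip 2, applying any
 *           active video effects, rotation, the rendering mode
 *           (resize / crop / black borders) and, last, the framing overlay.
 * @note     The resulting plane is recorded in pClipCtxt->lastDecodedPlane.
 ******************************************************************************
 */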
3540 M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC,
3541 M4VSS3GPP_ClipContext* pClipCtxt,
3542 M4_MediaTime ts,
3543 M4OSA_Bool bIsClip1,
3544 M4VIFI_ImagePlane *pResizePlane,
3545 M4VIFI_ImagePlane *pPlaneNoResize,
3546 M4VIFI_ImagePlane *pPlaneOut) {
3547
3548 M4OSA_ERR err = M4NO_ERROR;
3549 M4OSA_UInt8 numEffects = 0;
3550 M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
3551 M4OSA_UInt32 yuvFrameWidth = 0, yuvFrameHeight = 0;
3552 M4VIFI_ImagePlane* pTmp = M4OSA_NULL;
3553 M4VIFI_ImagePlane pTemp[3];
3554 M4OSA_UInt8 i = 0;
3555 M4OSA_Bool bSkipFramingEffect = M4OSA_FALSE;
3556
3557 memset((void *)pTemp, 0, 3*sizeof(M4VIFI_ImagePlane));
3558 /* Resize or rotate case */
3559 if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) {
3560 /**
3561         * Since the image will be modified (resized and/or rotated), an intermediate image plane is needed */
3562 err = M4VSS3GPP_intAllocateYUV420(pResizePlane,
3563 pClipCtxt->m_pPreResizeFrame[0].u_width,
3564 pClipCtxt->m_pPreResizeFrame[0].u_height);
3565 if (M4NO_ERROR != err) {
3566 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3567 M4VSS3GPP_intAllocateYUV420 returns 0x%x", err);
3568 return err;
3569 }
3570
3571 if ((pClipCtxt->pSettings->FileType ==
3572 M4VIDEOEDITING_kFileType_ARGB8888) &&
3573 (pC->nbActiveEffects == 0) &&
3574 (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) {
3575
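            /* Fast path for still pictures: when the clip is an ARGB8888 image
             * and no effect is active, let the decoder shell return the
             * pre-converted pPlaneYuvWithEffect directly instead of rendering
             * and post-processing a frame. */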
3576 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3577 pClipCtxt->pViDecCtxt,
3578 M4DECODER_kOptionID_EnableYuvWithEffect,
3579 (M4OSA_DataOption)M4OSA_TRUE);
3580 if (M4NO_ERROR == err) {
3581 pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3582 pClipCtxt->pViDecCtxt, &ts,
3583 pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE);
3584 }
3585
3586 } else {
3587 if (pClipCtxt->pSettings->FileType ==
3588 M4VIDEOEDITING_kFileType_ARGB8888) {
3589 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3590 pClipCtxt->pViDecCtxt,
3591 M4DECODER_kOptionID_EnableYuvWithEffect,
3592 (M4OSA_DataOption)M4OSA_FALSE);
3593 }
3594 if (M4NO_ERROR == err) {
3595 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3596 pClipCtxt->pViDecCtxt, &ts,
3597 pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE);
3598 }
3599
3600 }
3601 if (M4NO_ERROR != err) {
3602 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3603 returns error 0x%x", err);
3604 return err;
3605 }
3606
3607 if (pClipCtxt->pSettings->FileType !=
3608 M4VIDEOEDITING_kFileType_ARGB8888) {
3609 if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
3610 // Save width and height of un-rotated frame
3611 yuvFrameWidth = pClipCtxt->m_pPreResizeFrame[0].u_width;
3612 yuvFrameHeight = pClipCtxt->m_pPreResizeFrame[0].u_height;
3613 err = M4VSS3GPP_intRotateVideo(pClipCtxt->m_pPreResizeFrame,
3614 pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
3615 if (M4NO_ERROR != err) {
3616 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3617 rotateVideo() returns error 0x%x", err);
3618 return err;
3619 }
3620 /* Set the new video size for temporary buffer */
3621 M4VSS3GPP_intSetYUV420Plane(pResizePlane,
3622 pClipCtxt->m_pPreResizeFrame[0].u_width,
3623 pClipCtxt->m_pPreResizeFrame[0].u_height);
3624 }
3625 }
3626
3627 if (bIsClip1 == M4OSA_TRUE) {
3628 pC->bIssecondClip = M4OSA_FALSE;
3629 numEffects = pC->nbActiveEffects;
3630 } else {
3631 numEffects = pC->nbActiveEffects1;
3632 pC->bIssecondClip = M4OSA_TRUE;
3633 }
3634
3635 if ( numEffects > 0) {
3636 pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE;
3637             /* If the video frame needs to be resized or rotated, apply the
3638              * overlay only after the frame has been rendered with the rendering mode;
3639              * so skip the framing (overlay) effect when applying video effects here. */
3640 bSkipFramingEffect = M4OSA_TRUE;
3641 err = M4VSS3GPP_intApplyVideoEffect(pC,
3642 pClipCtxt->m_pPreResizeFrame, pResizePlane, bSkipFramingEffect);
3643 if (M4NO_ERROR != err) {
3644 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3645 M4VSS3GPP_intApplyVideoEffect() err 0x%x", err);
3646 return err;
3647 }
3648 pDecoderRenderFrame= pResizePlane;
3649 } else {
3650 pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame;
3651 }
3652 /* Do rendering mode */
3653 if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
3654 (pClipCtxt->pSettings->FileType !=
3655 M4VIDEOEDITING_kFileType_ARGB8888)) {
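            /* When a framing (overlay) effect is active, render into a
             * temporary plane first; the overlay is composited on top of it
             * into pC->yuv1 / pC->yuv2 further below. */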
3656 if (bIsClip1 == M4OSA_TRUE) {
3657 if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
3658 err = M4VSS3GPP_intAllocateYUV420(pTemp,
3659 pPlaneOut[0].u_width, pPlaneOut[0].u_height);
3660 if (M4NO_ERROR != err) {
3661                     M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3662                         M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
3663 pC->ewc.VppError = err;
3664 return M4NO_ERROR;
3665 }
3666 pTmp = pTemp;
3667 } else {
3668 pTmp = pC->yuv1;
3669 }
3670 err = M4VSS3GPP_intApplyRenderingMode (pC,
3671 pClipCtxt->pSettings->xVSS.MediaRendering,
3672 pDecoderRenderFrame,pTmp);
3673 } else {
3674 if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
3675 err = M4VSS3GPP_intAllocateYUV420(pTemp,
3676 pPlaneOut[0].u_width, pPlaneOut[0].u_height);
3677 if (M4NO_ERROR != err) {
3678                     M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3679                         M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
3680 pC->ewc.VppError = err;
3681 return M4NO_ERROR;
3682 }
3683 pTmp = pTemp;
3684 } else {
3685 pTmp = pC->yuv2;
3686 }
3687 err = M4VSS3GPP_intApplyRenderingMode (pC,
3688 pClipCtxt->pSettings->xVSS.MediaRendering,
3689 pDecoderRenderFrame,pTmp);
3690 }
3691 if (M4NO_ERROR != err) {
3692 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3693 M4VSS3GPP_intApplyRenderingMode error 0x%x ", err);
3694 for (i=0; i<3; i++) {
3695 if (pTemp[i].pac_data != M4OSA_NULL) {
3696 free(pTemp[i].pac_data);
3697 pTemp[i].pac_data = M4OSA_NULL;
3698 }
3699 }
3700 return err;
3701 }
3702             /* Apply the overlay if one exists */
3703 if (bIsClip1 == M4OSA_TRUE) {
3704 if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
3705 err = M4VSS3GPP_intApplyVideoOverlay(pC,
3706 pTemp, pC->yuv1);
3707 }
3708 pClipCtxt->lastDecodedPlane = pC->yuv1;
3709 } else {
3710 if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
3711 err = M4VSS3GPP_intApplyVideoOverlay(pC,
3712 pTemp, pC->yuv2);
3713 }
3714 pClipCtxt->lastDecodedPlane = pC->yuv2;
3715 }
3716 if (M4NO_ERROR != err) {
3717                 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3718                     M4VSS3GPP_intApplyVideoOverlay() error 0x%x ", err);
3719 pC->ewc.VppError = err;
3720 for (i=0; i<3; i++) {
3721 if (pTemp[i].pac_data != M4OSA_NULL) {
3722 free(pTemp[i].pac_data);
3723 pTemp[i].pac_data = M4OSA_NULL;
3724 }
3725 }
3726 return M4NO_ERROR;
3727 }
3728 } else {
3729 pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect;
3730 }
3731 // free the temp buffer
3732 for (i=0; i<3; i++) {
3733 if (pTemp[i].pac_data != M4OSA_NULL) {
3734 free(pTemp[i].pac_data);
3735 pTemp[i].pac_data = M4OSA_NULL;
3736 }
3737 }
3738
3739 if ((pClipCtxt->pSettings->FileType ==
3740 M4VIDEOEDITING_kFileType_ARGB8888) &&
3741 (pC->nbActiveEffects == 0) &&
3742 (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
3743 if (bIsClip1 == M4OSA_TRUE) {
3744 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3745 pClipCtxt->pViDecCtxt,
3746 M4DECODER_kOptionID_YuvWithEffectNonContiguous,
3747 (M4OSA_DataOption)pC->yuv1);
3748 } else {
3749 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3750 pClipCtxt->pViDecCtxt,
3751 M4DECODER_kOptionID_YuvWithEffectNonContiguous,
3752 (M4OSA_DataOption)pC->yuv2);
3753 }
3754 if (M4NO_ERROR != err) {
3755 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3756 null decoder setOption error 0x%x ", err);
3757 return err;
3758 }
3759 pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE;
3760 }
3761
3762         // Restore the original width and height of the pre-resize frame plane
3763         // (yuvFrameWidth is only set when the frame was rotated above)
3764         if ((0 != yuvFrameWidth) &&
            (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) {
3765
3766 M4VSS3GPP_intSetYUV420Plane(pClipCtxt->m_pPreResizeFrame,
3767 yuvFrameWidth, yuvFrameHeight);
3768 }
3769
3770 } else {
3771         /* Neither resize nor rotation is needed */
3772 if (bIsClip1 == M4OSA_TRUE) {
3773 numEffects = pC->nbActiveEffects;
3774 } else {
3775 numEffects = pC->nbActiveEffects1;
3776 }
3777
3778 if(numEffects > 0) {
3779 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3780 pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE);
3781 if (M4NO_ERROR != err) {
3782 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3783 Render returns error 0x%x", err);
3784 return err;
3785 }
3786
3787 bSkipFramingEffect = M4OSA_FALSE;
3788 if (bIsClip1 == M4OSA_TRUE) {
3789 pC->bIssecondClip = M4OSA_FALSE;
3790 err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
3791 pC->yuv1, bSkipFramingEffect);
3792 pClipCtxt->lastDecodedPlane = pC->yuv1;
3793 } else {
3794 pC->bIssecondClip = M4OSA_TRUE;
3795 err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
3796 pC->yuv2, bSkipFramingEffect);
3797 pClipCtxt->lastDecodedPlane = pC->yuv2;
3798 }
3799
3800 if (M4NO_ERROR != err) {
3801 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3802 M4VSS3GPP_intApplyVideoEffect error 0x%x", err);
3803 return err;
3804 }
3805 } else {
3806
3807 if (bIsClip1 == M4OSA_TRUE) {
3808 pTmp = pC->yuv1;
3809 } else {
3810 pTmp = pC->yuv2;
3811 }
3812 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3813 pClipCtxt->pViDecCtxt, &ts, pTmp, M4OSA_TRUE);
3814 if (M4NO_ERROR != err) {
3815 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3816 Render returns error 0x%x,", err);
3817 return err;
3818 }
3819 pClipCtxt->lastDecodedPlane = pTmp;
3820 }
3821 pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts;
3822 }
3823
3824 return err;
3825 }
3826
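/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intRotateVideo()
 * @brief    Rotate a YUV420 planar frame by 90, 180 or 270 degrees.
 * @note     90/270 rotation goes through temporary planes and swaps the
 *           width/height of pPlaneIn afterwards; 180 rotation is done in place.
 ******************************************************************************
 */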
3827 M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
3828 M4OSA_UInt32 rotationDegree) {
3829
3830 M4OSA_ERR err = M4NO_ERROR;
3831 M4VIFI_ImagePlane outPlane[3];
3832
3833 if (rotationDegree != 180) {
3834         // The output plane gets the input plane's width and height swapped
3835 outPlane[0].u_width = pPlaneIn[0].u_height;
3836 outPlane[0].u_height = pPlaneIn[0].u_width;
3837 outPlane[0].u_stride = outPlane[0].u_width;
3838 outPlane[0].u_topleft = 0;
3839 outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3840 (outPlane[0].u_stride*outPlane[0].u_height), M4VS,
3841 (M4OSA_Char*)("out Y plane for rotation"));
3842 if (outPlane[0].pac_data == M4OSA_NULL) {
3843 return M4ERR_ALLOC;
3844 }
3845
3846 outPlane[1].u_width = pPlaneIn[0].u_height/2;
3847 outPlane[1].u_height = pPlaneIn[0].u_width/2;
3848 outPlane[1].u_stride = outPlane[1].u_width;
3849 outPlane[1].u_topleft = 0;
3850 outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3851 (outPlane[1].u_stride*outPlane[1].u_height), M4VS,
3852 (M4OSA_Char*)("out U plane for rotation"));
3853 if (outPlane[1].pac_data == M4OSA_NULL) {
3854 free((void *)outPlane[0].pac_data);
3855 return M4ERR_ALLOC;
3856 }
3857
3858 outPlane[2].u_width = pPlaneIn[0].u_height/2;
3859 outPlane[2].u_height = pPlaneIn[0].u_width/2;
3860 outPlane[2].u_stride = outPlane[2].u_width;
3861 outPlane[2].u_topleft = 0;
3862 outPlane[2].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3863 (outPlane[2].u_stride*outPlane[2].u_height), M4VS,
3864 (M4OSA_Char*)("out V plane for rotation"));
3865 if (outPlane[2].pac_data == M4OSA_NULL) {
3866 free((void *)outPlane[0].pac_data);
3867 free((void *)outPlane[1].pac_data);
3868 return M4ERR_ALLOC;
3869 }
3870 }
3871
3872 switch(rotationDegree) {
3873 case 90:
3874 M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
3875 break;
3876
3877 case 180:
3878             // In-place rotation, so planeOut = planeIn
3879 M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneIn);
3880 break;
3881
3882 case 270:
3883 M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
3884 break;
3885
3886 default:
3887 M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree);
3888 err = M4ERR_PARAMETER;
3889 break;
3890 }
3891
3892 if (rotationDegree != 180) {
3893 memset((void *)pPlaneIn[0].pac_data, 0,
3894 (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
3895 memset((void *)pPlaneIn[1].pac_data, 0,
3896 (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
3897 memset((void *)pPlaneIn[2].pac_data, 0,
3898 (pPlaneIn[2].u_width*pPlaneIn[2].u_height));
3899 // Copy Y, U and V planes
3900 memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data,
3901 (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
3902 memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data,
3903 (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
3904 memcpy((void *)pPlaneIn[2].pac_data, (void *)outPlane[2].pac_data,
3905 (pPlaneIn[2].u_width*pPlaneIn[2].u_height));
3906
3907 free((void *)outPlane[0].pac_data);
3908 free((void *)outPlane[1].pac_data);
3909 free((void *)outPlane[2].pac_data);
3910
3911         // Swap the width and height of the input plane
3912 uint32_t temp = 0;
3913 temp = pPlaneIn[0].u_width;
3914 pPlaneIn[0].u_width = pPlaneIn[0].u_height;
3915 pPlaneIn[0].u_height = temp;
3916 pPlaneIn[0].u_stride = pPlaneIn[0].u_width;
3917
3918 temp = pPlaneIn[1].u_width;
3919 pPlaneIn[1].u_width = pPlaneIn[1].u_height;
3920 pPlaneIn[1].u_height = temp;
3921 pPlaneIn[1].u_stride = pPlaneIn[1].u_width;
3922
3923 temp = pPlaneIn[2].u_width;
3924 pPlaneIn[2].u_width = pPlaneIn[2].u_height;
3925 pPlaneIn[2].u_height = temp;
3926 pPlaneIn[2].u_stride = pPlaneIn[2].u_width;
3927 }
3928
3929 return err;
3930 }
3931
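/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intSetYUV420Plane()
 * @brief    Set the width, height and stride of the three planes of a YUV420
 *           image; the chroma planes get half the luma dimensions.
 ******************************************************************************
 */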
3932 M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
3933 M4OSA_UInt32 width, M4OSA_UInt32 height) {
3934
3935 M4OSA_ERR err = M4NO_ERROR;
3936
3937 if (planeIn == M4OSA_NULL) {
3938 M4OSA_TRACE1_0("NULL in plane, error");
3939 return M4ERR_PARAMETER;
3940 }
3941
3942 planeIn[0].u_width = width;
3943 planeIn[0].u_height = height;
3944 planeIn[0].u_stride = planeIn[0].u_width;
3945
3946 planeIn[1].u_width = width/2;
3947 planeIn[1].u_height = height/2;
3948 planeIn[1].u_stride = planeIn[1].u_width;
3949
3950 planeIn[2].u_width = width/2;
3951 planeIn[2].u_height = height/2;
3952     planeIn[2].u_stride = planeIn[2].u_width;
3953
3954 return err;
3955 }
3956