/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.mediadump;

import java.io.IOException;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.FileOutputStream;

import java.lang.Integer;
import java.lang.Math;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.channels.FileChannel;
import java.nio.IntBuffer;
import java.util.Properties;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.MediaController;
import android.widget.MediaController.MediaPlayerControl;

/**
 * A view that plays the video specified by VideoDumpConfig.VIDEO_URI and dumps the screen
 * into raw RGB files.
 * It uses a renderer to draw each video frame onto a surface texture, reads back the
 * pixels, and writes them into an RGB file on the sdcard.
 * Those raw RGB files are used to measure the quality distortion against the original
 * video. They can be viewed with the RgbPlayer app for debugging.
 */
class VideoDumpView extends GLSurfaceView implements MediaPlayerControl {
    private static final String TAG = "VideoDumpView";
    VideoDumpRenderer mRenderer;
    private MediaController mMediaController;
    private boolean mMediaControllerAttached = false;
    private MediaPlayer mMediaPlayer = null;
    private BufferedWriter mImageListWriter = null;

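    /*
     * Typical usage, sketched here for reference (the host Activity is assumed and is
     * not part of this file):
     *
     *     VideoDumpView videoView = new VideoDumpView(activity);
     *     videoView.setMediaController(new MediaController(activity));
     *     activity.setContentView(videoView);
     *     // Forward the Activity lifecycle: onResume() opens the video and the image
     *     // list file, start() begins playback and frame dumping, onPause() stops both.
     */
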
    // A series of configuration constants.
    class VideoDumpConfig {
        // Currently we are running with a local copy of the video.
        // It should work with an "http://" style streaming URL as well.
        public static final String VIDEO_URI = "/sdcard/mediadump/sample.mp4";
        public static final String ROOT_DIR = "/sdcard/mediadump/";
        public static final String IMAGES_LIST = "images.lst";
        public static final String IMAGE_PREFIX = "img";
        public static final String IMAGE_SUFFIX = ".rgb";
        public static final String PROPERTY_FILE = "prop.xml";

        // So far, glReadPixels only supports two (format, type) combinations:
        //     GL_RGB  GL_UNSIGNED_SHORT_5_6_5   16 bits per pixel (default)
        //     GL_RGBA GL_UNSIGNED_BYTE          32 bits per pixel
        public static final int PIXEL_FORMAT = GLES20.GL_RGB;
        public static final int PIXEL_TYPE = PIXEL_FORMAT == GLES20.GL_RGBA
                ? GLES20.GL_UNSIGNED_BYTE : GLES20.GL_UNSIGNED_SHORT_5_6_5;
        public static final int BYTES_PER_PIXEL =
                PIXEL_FORMAT == GLES20.GL_RGBA ? 4 : 2;
        public static final boolean SET_CHOOSER = PIXEL_FORMAT == GLES20.GL_RGBA;

        // On a Motorola Xoom, it takes 100ms to read pixels and 180ms to write to a file
        // to dump a complete 720p (1280*720) video frame. That is much slower than the
        // frame playback interval (40ms). So we only dump a center block, which should
        // still capture the end-to-end distortion. A reasonable block size is 256x256,
        // which takes 4ms to read pixels and 25ms to write to a file.
        public static final int MAX_DUMP_WIDTH = 256;
        public static final int MAX_DUMP_HEIGHT = 256;
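
        // For reference, with the default GL_RGB/GL_UNSIGNED_SHORT_5_6_5 format a full
        // 256x256 block is 256 * 256 * 2 = 131072 bytes per frame, i.e. roughly 3 MB/s
        // of raw dump data at the assumed 25 fps (double that for GL_RGBA).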

        // TODO: MediaPlayer doesn't report the video frame rate, so we would need to
        // figure it out by dividing the total number of frames by the duration.
        public static final int FRAME_RATE = 25;
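        // Sketch only (not wired in): after playback, the measured rate could be derived
        // from the renderer's frame count and the clip length in milliseconds, e.g.
        //     frameRate ~= mFrameNumber * 1000 / mMediaPlayer.getDuration();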
    }

    public VideoDumpView(Context context) {
        super(context);
        setEGLContextClientVersion(2);
        // GLSurfaceView uses RGB_5_6_5 by default. When dumping RGBA, request an
        // 8-bit R, G, B, A surface (plus 8-bit depth and stencil) instead.
        if (VideoDumpConfig.SET_CHOOSER) {
            setEGLConfigChooser(8, 8, 8, 8, 8, 8);
        }
        mRenderer = new VideoDumpRenderer(context);
        setRenderer(mRenderer);
    }

    @Override
    public void onPause() {
        stopPlayback();
        super.onPause();
    }

    @Override
    public void onResume() {
        Log.d(TAG, "onResume");

        mMediaPlayer = new MediaPlayer();
        try {
            mMediaPlayer.setDataSource(VideoDumpConfig.VIDEO_URI);

            // Delete the dump files left over from a previous run.
            class RGBFilter implements FilenameFilter {
                public boolean accept(File dir, String name) {
                    return (name.endsWith(VideoDumpConfig.IMAGE_SUFFIX));
                }
            }
            File dump_dir = new File(VideoDumpConfig.ROOT_DIR);
            File[] dump_files = dump_dir.listFiles(new RGBFilter());
            if (dump_files != null) {
                for (File dump_file : dump_files) {
                    dump_file.delete();
                }
            }

            File image_list = new File(VideoDumpConfig.ROOT_DIR
                                       + VideoDumpConfig.IMAGES_LIST);
            image_list.delete();
            mImageListWriter = new BufferedWriter(new FileWriter(image_list));
        } catch (java.io.IOException e) {
            Log.e(TAG, e.getMessage(), e);
        }

        // Hand the player and the image list writer to the renderer on the GL thread.
        queueEvent(new Runnable() {
                public void run() {
                    mRenderer.setMediaPlayer(mMediaPlayer);
                    mRenderer.setImageListWriter(mImageListWriter);
                }});

        super.onResume();
    }

    public void start() {
        mMediaPlayer.start();
    }

    public void pause() {
        mMediaPlayer.pause();
        try {
            mImageListWriter.flush();
        } catch (java.io.IOException e) {
            Log.e(TAG, e.getMessage(), e);
        }
    }

    public void stopPlayback() {
        Log.d(TAG, "stopPlayback");

        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
        if (mImageListWriter != null) {
            try {
                mImageListWriter.flush();
                mImageListWriter.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
        } else {
            Log.d(TAG, "image list file was not written successfully.");
        }
    }

    public void setMediaController(MediaController controller) {
        if (mMediaController != null) {
            mMediaController.hide();
        }
        mMediaController = controller;
    }

    private void attachMediaController() {
        if (mMediaPlayer != null && mMediaController != null) {
            if (!mMediaControllerAttached) {
                mMediaController.setMediaPlayer(this);
                View anchorView = this.getParent() instanceof View ?
                        (View)this.getParent() : this;
                mMediaController.setAnchorView(anchorView);
                mMediaController.setEnabled(true);
                mMediaControllerAttached = true;
            }
            mMediaController.show();
        }
    }

    private boolean isInPlaybackState() {
        return (mMediaPlayer != null && mMediaPlayer.isPlaying());
    }

    public boolean canPause() {
        return true;
    }

    public boolean canSeekBackward() {
        return true;
    }

    public boolean canSeekForward() {
        return true;
    }

    public int getBufferPercentage() {
        return 1;
    }

    public int getCurrentPosition() {
        if (isInPlaybackState()) {
            return mMediaPlayer.getCurrentPosition();
        }
        return 0;
    }

    public int getDuration() {
        return mMediaPlayer.getDuration();
    }

    public boolean isPlaying() {
        return isInPlaybackState() && mMediaPlayer.isPlaying();
    }

    public void seekTo(int pos) {
        mMediaPlayer.seekTo(pos);
    }

    @Override
    public int getAudioSessionId() {
        return 0;
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        attachMediaController();
        return true;
    }

    /**
     * A renderer that reads each video frame from the media player, draws it over a
     * surface texture, dumps the on-screen pixels into a buffer, and writes the pixels
     * into an RGB file on the sdcard.
     */
    private static class VideoDumpRenderer
        implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
        private static String TAG = "VideoDumpRenderer";

        /* All GL related fields from
         * http://developer.android.com/resources/samples/ApiDemos/src/com/example
         * /android/apis/graphics/GLES20TriangleRenderer.html
         */
        private static final int FLOAT_SIZE_BYTES = 4;
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
            1.0f,  1.0f, 0, 1.f, 1.f,
        };

        private FloatBuffer mTriangleVertices;

        private final String mVertexShader =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private final String mFragmentShader =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        private float[] mMVPMatrix = new float[16];
        private float[] mSTMatrix = new float[16];

        private int mProgram;
        private int mTextureID;
        private int muMVPMatrixHandle;
        private int muSTMatrixHandle;
        private int maPositionHandle;
        private int maTextureHandle;

        private SurfaceTexture mSurface;
        private boolean updateSurface = false;

        // The GL_TEXTURE_EXTERNAL_OES token from the OES_EGL_image_external extension,
        // defined here because the GLES20 class does not expose it.
        private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;


        /**
         * Fields used to read the video source and dump frames to file.
         */
        // The media player that loads and decodes the video.
        // Not owned by this class.
        private MediaPlayer mMediaPlayer;
        // The number of frames received from the media player.
        private int mFrameNumber = 0;
        // The number of frames that have been drawn on screen.
        private int mDrawNumber = 0;
        // The width and height of the dumping block.
        private int mWidth = 0;
        private int mHeight = 0;
        // The offset of the dumping block.
        private int mStartX = 0;
        private int mStartY = 0;
        // A buffer to hold the dumped pixels.
        private ByteBuffer mBuffer = null;
        // A writer for the list of image filenames.
        private BufferedWriter mImageListWriter;

        public VideoDumpRenderer(Context context) {
            mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            mTriangleVertices.put(mTriangleVerticesData).position(0);

            Matrix.setIdentityM(mSTMatrix, 0);
        }

        public void setMediaPlayer(MediaPlayer player) {
            mMediaPlayer = player;
        }

        public void setImageListWriter(BufferedWriter imageListWriter) {
            mImageListWriter = imageListWriter;
        }

        /**
         * Called to draw the current frame.
         * Latches the newest decoded frame, if any, renders it, and dumps the pixels
         * of new frames to a file.
         */
        public void onDrawFrame(GL10 glUnused) {
            boolean isNewFrame = false;
            int frameNumber = 0;

            synchronized(this) {
                if (updateSurface) {
                    isNewFrame = true;
                    frameNumber = mFrameNumber;
                    mSurface.updateTexImage();
                    mSurface.getTransformMatrix(mSTMatrix);
                    updateSurface = false;
                }
            }
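
            // Note: if the GL thread falls behind, several onFrameAvailable() calls can
            // collapse into a single updateTexImage() here, so frameNumber may skip
            // values; the mDrawNumber/frameNumber pair in the logs below makes that
            // visible.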

            // Initial clear.
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            // Load the program, which contains the basic rules for drawing the vertices
            // and textures.
            GLES20.glUseProgram(mProgram);
            checkGlError("glUseProgram");

            // Activate the texture.
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

            // Load the vertex coordinates. Simple here since it only draws a rectangle
            // that fits the whole screen.
            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maPosition");
            GLES20.glEnableVertexAttribArray(maPositionHandle);
            checkGlError("glEnableVertexAttribArray maPositionHandle");

            // Load the texture coordinates, which essentially map a rectangle covering
            // the whole video frame.
            mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
            checkGlError("glVertexAttribPointer maTextureHandle");
            GLES20.glEnableVertexAttribArray(maTextureHandle);
            checkGlError("glEnableVertexAttribArray maTextureHandle");

            // Set up the GL matrices.
            Matrix.setIdentityM(mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

            // Draw a rectangle and render the video frame as a texture on it.
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glFinish();

            if (isNewFrame) {  // avoid duplicates.
                Log.d(TAG, mDrawNumber + "/" + frameNumber + " before dumping "
                      + System.currentTimeMillis());
                DumpToFile(frameNumber);
                Log.d(TAG, mDrawNumber + "/" + frameNumber + " after  dumping "
                      + System.currentTimeMillis());

                mDrawNumber++;
            }
        }

        // Call the GL function that dumps the screen into a buffer, then write to a file.
        private void DumpToFile(int frameNumber) {
            GLES20.glReadPixels(mStartX, mStartY, mWidth, mHeight,
                                VideoDumpConfig.PIXEL_FORMAT,
                                VideoDumpConfig.PIXEL_TYPE,
                                mBuffer);
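            // Note: glReadPixels fills the buffer starting from the bottom row of the
            // block (GL uses a lower-left origin), so rows in the .rgb file are stored
            // bottom-up relative to the screen.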
            checkGlError("glReadPixels");

            Log.d(TAG, mDrawNumber + "/" + frameNumber + " after  glReadPixels "
                  + System.currentTimeMillis());

            String filename = VideoDumpConfig.ROOT_DIR + VideoDumpConfig.IMAGE_PREFIX
                    + frameNumber + VideoDumpConfig.IMAGE_SUFFIX;
            try {
                mImageListWriter.write(filename);
                mImageListWriter.newLine();
                FileOutputStream fos = new FileOutputStream(filename);
                fos.write(mBuffer.array());
                fos.close();
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
        }

        /**
         * Called when the surface changes size.
         * Called after the surface is created and whenever the OpenGL surface size changes.
         */
        public void onSurfaceChanged(GL10 glUnused, int width, int height) {
            Log.d(TAG, "Surface size: " + width + "x" + height);

            int video_width = mMediaPlayer.getVideoWidth();
            int video_height = mMediaPlayer.getVideoHeight();
            Log.d(TAG, "Video size: " + video_width
                  + "x" + video_height);

            // TODO: adjust video_width and video_height with the surface size.
            GLES20.glViewport(0, 0, video_width, video_height);

            mWidth = Math.min(VideoDumpConfig.MAX_DUMP_WIDTH, video_width);
            mHeight = Math.min(VideoDumpConfig.MAX_DUMP_HEIGHT, video_height);
            mStartX = video_width / mWidth / 2 * mWidth;
            mStartY = video_height / mHeight / 2 * mHeight;
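            // The start offsets snap the dump block to a multiple of the block size near
            // the center of the frame. For example, with a 1280x720 video and a 256x256
            // block: mStartX = 1280 / 256 / 2 * 256 = 512, mStartY = 720 / 256 / 2 * 256
            // = 256, so the block covers (512, 256) to (768, 512).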

            Log.d(TAG, "dumping block start at (" + mStartX + "," + mStartY + ") "
                  + "size " + mWidth + "x" + mHeight);

            int image_size = mWidth * mHeight * VideoDumpConfig.BYTES_PER_PIXEL;
            mBuffer = ByteBuffer.allocate(image_size);

            int[] bpp = new int[3];
            GLES20.glGetIntegerv(GLES20.GL_RED_BITS, bpp, 0);
            GLES20.glGetIntegerv(GLES20.GL_GREEN_BITS, bpp, 1);
            GLES20.glGetIntegerv(GLES20.GL_BLUE_BITS, bpp, 2);
            Log.d(TAG, "rgb bits: " + bpp[0] + "-" + bpp[1] + "-" + bpp[2]);

            // Save the properties into an XML file
            // so the RgbPlayer can understand the output format.
            Properties prop = new Properties();
            prop.setProperty("width", Integer.toString(mWidth));
            prop.setProperty("height", Integer.toString(mHeight));
            prop.setProperty("startX", Integer.toString(mStartX));
            prop.setProperty("startY", Integer.toString(mStartY));
            prop.setProperty("bytesPerPixel",
                             Integer.toString(VideoDumpConfig.BYTES_PER_PIXEL));
            prop.setProperty("frameRate", Integer.toString(VideoDumpConfig.FRAME_RATE));
            try {
                prop.storeToXML(new FileOutputStream(VideoDumpConfig.ROOT_DIR
                                                     + VideoDumpConfig.PROPERTY_FILE), "");
            } catch (java.io.IOException e) {
                Log.e(TAG, e.getMessage(), e);
            }
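            // For the 720p example above and the default RGB 565 format, prop.xml would
            // roughly contain: width=256, height=256, startX=512, startY=256,
            // bytesPerPixel=2, frameRate=25 (stored as Java Properties XML entries).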
        }

        /**
         * Called when the surface is created or recreated.
         * Called when the rendering thread starts and whenever the EGL context is lost.
         * A place to put code that creates resources which are needed when rendering
         * starts and must be recreated when the EGL context is lost, e.g. textures.
         * Note that when the EGL context is lost, all OpenGL resources associated with
         * that context will be automatically deleted.
         */
        public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
            Log.d(TAG, "onSurfaceCreated");

            /* Set up shaders and handles to their variables */
            mProgram = createProgram(mVertexShader, mFragmentShader);
            if (mProgram == 0) {
                return;
            }
            maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (maPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (maTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }

            muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (muMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uMVPMatrix");
            }

            muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (muSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uSTMatrix");
            }


            // Create our texture. This has to be done each time the surface is created.
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);

            mTextureID = textures[0];
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
            checkGlError("glBindTexture mTextureID");

            // Can't do mipmapping with a media player source.
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                                   GLES20.GL_NEAREST);
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                                   GLES20.GL_LINEAR);
            // Clamp to edge is the only option.
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                                   GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                                   GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameteri mTextureID");

            /*
             * Create the SurfaceTexture that will feed this textureID,
             * and pass it to the MediaPlayer.
             */
            mSurface = new SurfaceTexture(mTextureID);
            mSurface.setOnFrameAvailableListener(this);

            Surface surface = new Surface(mSurface);
            mMediaPlayer.setSurface(surface);
            surface.release();

            try {
                mMediaPlayer.prepare();
            } catch (IOException t) {
                Log.e(TAG, "media player prepare failed", t);
            }

            synchronized(this) {
                updateSurface = false;
            }
        }

        public synchronized void onFrameAvailable(SurfaceTexture surface) {
            /* For simplicity, SurfaceTexture calls this when it has new
             * data available. The call may come in on an arbitrary thread,
             * so let's be safe and use synchronization. No OpenGL calls can be done here.
             */
            mFrameNumber++;
            updateSurface = true;
        }

        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            if (shader != 0) {
                GLES20.glShaderSource(shader, source);
                GLES20.glCompileShader(shader);
                int[] compiled = new int[1];
                GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
                if (compiled[0] == 0) {
                    Log.e(TAG, "Could not compile shader " + shaderType + ":");
                    Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                    GLES20.glDeleteShader(shader);
                    shader = 0;
                }
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            if (program != 0) {
                GLES20.glAttachShader(program, vertexShader);
                checkGlError("glAttachShader");
                GLES20.glAttachShader(program, pixelShader);
                checkGlError("glAttachShader");
                GLES20.glLinkProgram(program);
                int[] linkStatus = new int[1];
                GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
                if (linkStatus[0] != GLES20.GL_TRUE) {
                    Log.e(TAG, "Could not link program: ");
                    Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                    GLES20.glDeleteProgram(program);
                    program = 0;
                }
            }
            return program;
        }

        private void checkGlError(String op) {
            int error;
            while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

    }  // End of class VideoDumpRenderer.

}  // End of class VideoDumpView.