// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.opengl.GLES20;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

@JNINamespace("media")
public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
    static class CaptureCapability {
        public int mWidth = 0;
        public int mHeight = 0;
        public int mDesiredFps = 0;
    }

    // Some devices with an OS older than JELLY_BEAN don't support the YV12
    // format correctly, and some devices don't support it correctly even on
    // JELLY_BEAN or newer. To work around the issue on those devices, we
    // request NV21 instead. This is a temporary hack until device
    // manufacturers fix the problem or we no longer need to support those
    // devices.
    private static class DeviceImageFormatHack {
        private static final String[] sBUGGY_DEVICE_LIST = {
            "SAMSUNG-SGH-I747",
            "ODROID-U2",
        };

        static int getImageFormat() {
            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
                return ImageFormat.NV21;
            }

            for (String buggyDevice : sBUGGY_DEVICE_LIST) {
                if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
                    return ImageFormat.NV21;
                }
            }
            return ImageFormat.YV12;
        }
    }

    private Camera mCamera;
    public ReentrantLock mPreviewBufferLock = new ReentrantLock();
    private int mImageFormat = ImageFormat.YV12;
    private byte[] mColorPlane = null;
    private Context mContext = null;
    // True when native code has started capture.
    private boolean mIsRunning = false;

    private static final int NUM_CAPTURE_BUFFERS = 3;
    private int mExpectedFrameSize = 0;
    private int mId = 0;
    // Native callback context variable.
    private long mNativeVideoCaptureDeviceAndroid = 0;
    private int[] mGlTextures = null;
    private SurfaceTexture mSurfaceTexture = null;
    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private int mCameraOrientation = 0;
    private int mCameraFacing = 0;
    private int mDeviceOrientation = 0;

    CaptureCapability mCurrentCapability = null;
    private static final String TAG = "VideoCapture";

    @CalledByNative
    public static VideoCapture createVideoCapture(
            Context context, int id, long nativeVideoCaptureDeviceAndroid) {
        return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
    }

    public VideoCapture(
            Context context, int id, long nativeVideoCaptureDeviceAndroid) {
        mContext = context;
        mId = id;
        mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
    }
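    // Typical lifecycle, driven from the native side (the owner of
    // |mNativeVideoCaptureDeviceAndroid|) through the @CalledByNative entry
    // points: allocate() -> startCapture() -> onPreviewFrame() callbacks ->
    // stopCapture() -> deallocate().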
    // Returns true on success, false otherwise.
    @CalledByNative
    public boolean allocate(int width, int height, int frameRate) {
        Log.d(TAG, "allocate: requested width=" + width +
                ", height=" + height + ", frameRate=" + frameRate);
        try {
            mCamera = Camera.open(mId);
        } catch (RuntimeException ex) {
            Log.e(TAG, "allocate: Camera.open: " + ex);
            return false;
        }

        try {
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            Camera.getCameraInfo(mId, cameraInfo);
            mCameraOrientation = cameraInfo.orientation;
            mCameraFacing = cameraInfo.facing;
            mDeviceOrientation = getDeviceOrientation();
            Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
                    ", camera orientation=" + mCameraOrientation +
                    ", facing=" + mCameraFacing);

            Camera.Parameters parameters = mCamera.getParameters();

            // Calculate fps. The supported ranges are expressed in frames
            // per second scaled by 1000, e.g. {15000, 30000}.
            List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
            if (listFpsRange == null || listFpsRange.size() == 0) {
                Log.e(TAG, "allocate: no fps range found");
                return false;
            }
            int frameRateScaled = frameRate * 1000;
            Iterator itFpsRange = listFpsRange.iterator();
            int[] fpsRange = (int[]) itFpsRange.next();
            // Use the first range as default, rounding its scaled minimum up
            // to a whole frames-per-second value.
            int fpsMin = fpsRange[0];
            int fpsMax = fpsRange[1];
            int newFrameRate = (fpsMin + 999) / 1000;
            // Prefer the first range that contains the requested frame rate.
            while (itFpsRange.hasNext()) {
                fpsRange = (int[]) itFpsRange.next();
                if (fpsRange[0] <= frameRateScaled &&
                        frameRateScaled <= fpsRange[1]) {
                    fpsMin = fpsRange[0];
                    fpsMax = fpsRange[1];
                    newFrameRate = frameRate;
                    break;
                }
            }
            frameRate = newFrameRate;
            Log.d(TAG, "allocate: fps set to " + frameRate);

            mCurrentCapability = new CaptureCapability();
            mCurrentCapability.mDesiredFps = frameRate;

            // Calculate size: pick the supported preview size closest to the
            // requested one, measured as the sum of the width and height
            // differences.
            List<Camera.Size> listCameraSize =
                    parameters.getSupportedPreviewSizes();
            int minDiff = Integer.MAX_VALUE;
            int matchedWidth = width;
            int matchedHeight = height;
            Iterator itCameraSize = listCameraSize.iterator();
            while (itCameraSize.hasNext()) {
                Camera.Size size = (Camera.Size) itCameraSize.next();
                int diff = Math.abs(size.width - width) +
                        Math.abs(size.height - height);
                Log.d(TAG, "allocate: supported resolution (" +
                        size.width + ", " + size.height + "), diff=" + diff);
                // TODO(wjia): Remove this hack (forcing width to be a multiple
                // of 32) by supporting stride in video frame buffer.
                // Right now, VideoCaptureController requires compact YV12
                // (i.e., with no padding).
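                // For example, 640x480 is eligible (640 % 32 == 0) while
                // 720x480 is rejected (720 % 32 == 16).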
                if (diff < minDiff && (size.width % 32 == 0)) {
                    minDiff = diff;
                    matchedWidth = size.width;
                    matchedHeight = size.height;
                }
            }
            if (minDiff == Integer.MAX_VALUE) {
                Log.e(TAG, "allocate: cannot find a resolution whose width " +
                        "is a multiple of 32");
                return false;
            }
            mCurrentCapability.mWidth = matchedWidth;
            mCurrentCapability.mHeight = matchedHeight;
            Log.d(TAG, "allocate: matched width=" + matchedWidth +
                    ", height=" + matchedHeight);

            calculateImageFormat(matchedWidth, matchedHeight);

            if (parameters.isVideoStabilizationSupported()) {
                Log.d(TAG, "Video stabilization supported, currently: "
                        + parameters.getVideoStabilization() + ", setting it.");
                parameters.setVideoStabilization(true);
            } else {
                Log.d(TAG, "Video stabilization not supported.");
            }

            parameters.setPreviewSize(matchedWidth, matchedHeight);
            parameters.setPreviewFormat(mImageFormat);
            parameters.setPreviewFpsRange(fpsMin, fpsMax);
            mCamera.setParameters(parameters);

            // Set up a SurfaceTexture for the camera to render into; frames
            // are still delivered through the preview callback buffers.
            mGlTextures = new int[1];
            // Generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, mGlTextures, 0);
            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            // Clamp to edge is the only supported wrap mode for external
            // textures.
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

            mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
            mSurfaceTexture.setOnFrameAvailableListener(null);

            mCamera.setPreviewTexture(mSurfaceTexture);

            // Both YV12 and NV21 use 12 bits per pixel, so each callback
            // buffer holds width * height * 3 / 2 bytes.
            int bufSize = matchedWidth * matchedHeight *
                    ImageFormat.getBitsPerPixel(mImageFormat) / 8;
            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
                byte[] buffer = new byte[bufSize];
                mCamera.addCallbackBuffer(buffer);
            }
            mExpectedFrameSize = bufSize;
        } catch (IOException ex) {
            Log.e(TAG, "allocate: " + ex);
            return false;
        }

        return true;
    }

    @CalledByNative
    public int queryWidth() {
        return mCurrentCapability.mWidth;
    }

    @CalledByNative
    public int queryHeight() {
        return mCurrentCapability.mHeight;
    }

    @CalledByNative
    public int queryFrameRate() {
        return mCurrentCapability.mDesiredFps;
    }

    @CalledByNative
    public int getColorspace() {
        switch (mImageFormat) {
            case ImageFormat.YV12:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
            case ImageFormat.NV21:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
            case ImageFormat.YUY2:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
            case ImageFormat.NV16:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
            case ImageFormat.JPEG:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
            case ImageFormat.RGB_565:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
            case ImageFormat.UNKNOWN:
            default:
                return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
        }
    }

    @CalledByNative
    public int startCapture() {
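        // |mPreviewBufferLock| serializes |mIsRunning| with onPreviewFrame(),
        // which is invoked on the camera callback thread.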
Log.e(TAG, "startCapture: camera is null"); 273 return -1; 274 } 275 276 mPreviewBufferLock.lock(); 277 try { 278 if (mIsRunning) { 279 return 0; 280 } 281 mIsRunning = true; 282 } finally { 283 mPreviewBufferLock.unlock(); 284 } 285 mCamera.setPreviewCallbackWithBuffer(this); 286 mCamera.startPreview(); 287 return 0; 288 } 289 290 @CalledByNative stopCapture()291 public int stopCapture() { 292 if (mCamera == null) { 293 Log.e(TAG, "stopCapture: camera is null"); 294 return 0; 295 } 296 297 mPreviewBufferLock.lock(); 298 try { 299 if (!mIsRunning) { 300 return 0; 301 } 302 mIsRunning = false; 303 } finally { 304 mPreviewBufferLock.unlock(); 305 } 306 307 mCamera.stopPreview(); 308 mCamera.setPreviewCallbackWithBuffer(null); 309 return 0; 310 } 311 312 @CalledByNative deallocate()313 public void deallocate() { 314 if (mCamera == null) 315 return; 316 317 stopCapture(); 318 try { 319 mCamera.setPreviewTexture(null); 320 if (mGlTextures != null) 321 GLES20.glDeleteTextures(1, mGlTextures, 0); 322 mCurrentCapability = null; 323 mCamera.release(); 324 mCamera = null; 325 } catch (IOException ex) { 326 Log.e(TAG, "deallocate: failed to deallocate camera, " + ex); 327 return; 328 } 329 } 330 331 @Override onPreviewFrame(byte[] data, Camera camera)332 public void onPreviewFrame(byte[] data, Camera camera) { 333 mPreviewBufferLock.lock(); 334 try { 335 if (!mIsRunning) { 336 return; 337 } 338 if (data.length == mExpectedFrameSize) { 339 int rotation = getDeviceOrientation(); 340 if (rotation != mDeviceOrientation) { 341 mDeviceOrientation = rotation; 342 Log.d(TAG, 343 "onPreviewFrame: device orientation=" + 344 mDeviceOrientation + ", camera orientation=" + 345 mCameraOrientation); 346 } 347 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) { 348 rotation = 360 - rotation; 349 } 350 rotation = (mCameraOrientation + rotation) % 360; 351 nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid, 352 data, mExpectedFrameSize, rotation); 353 } 354 } finally { 355 mPreviewBufferLock.unlock(); 356 if (camera != null) { 357 camera.addCallbackBuffer(data); 358 } 359 } 360 } 361 362 // TODO(wjia): investigate whether reading from texture could give better 363 // performance and frame rate. 364 @Override onFrameAvailable(SurfaceTexture surfaceTexture)365 public void onFrameAvailable(SurfaceTexture surfaceTexture) { } 366 367 private static class ChromiumCameraInfo { 368 private final int mId; 369 private final Camera.CameraInfo mCameraInfo; 370 ChromiumCameraInfo(int index)371 private ChromiumCameraInfo(int index) { 372 mId = index; 373 mCameraInfo = new Camera.CameraInfo(); 374 Camera.getCameraInfo(index, mCameraInfo); 375 } 376 377 @CalledByNative("ChromiumCameraInfo") getNumberOfCameras()378 private static int getNumberOfCameras() { 379 return Camera.getNumberOfCameras(); 380 } 381 382 @CalledByNative("ChromiumCameraInfo") getAt(int index)383 private static ChromiumCameraInfo getAt(int index) { 384 return new ChromiumCameraInfo(index); 385 } 386 387 @CalledByNative("ChromiumCameraInfo") getId()388 private int getId() { 389 return mId; 390 } 391 392 @CalledByNative("ChromiumCameraInfo") getDeviceName()393 private String getDeviceName() { 394 return "camera " + mId + ", facing " + 395 (mCameraInfo.facing == 396 Camera.CameraInfo.CAMERA_FACING_FRONT ? 
"front" : "back"); 397 } 398 399 @CalledByNative("ChromiumCameraInfo") getOrientation()400 private int getOrientation() { 401 return mCameraInfo.orientation; 402 } 403 } 404 nativeOnFrameAvailable( long nativeVideoCaptureDeviceAndroid, byte[] data, int length, int rotation)405 private native void nativeOnFrameAvailable( 406 long nativeVideoCaptureDeviceAndroid, 407 byte[] data, 408 int length, 409 int rotation); 410 getDeviceOrientation()411 private int getDeviceOrientation() { 412 int orientation = 0; 413 if (mContext != null) { 414 WindowManager wm = (WindowManager) mContext.getSystemService( 415 Context.WINDOW_SERVICE); 416 switch(wm.getDefaultDisplay().getRotation()) { 417 case Surface.ROTATION_90: 418 orientation = 90; 419 break; 420 case Surface.ROTATION_180: 421 orientation = 180; 422 break; 423 case Surface.ROTATION_270: 424 orientation = 270; 425 break; 426 case Surface.ROTATION_0: 427 default: 428 orientation = 0; 429 break; 430 } 431 } 432 return orientation; 433 } 434 calculateImageFormat(int width, int height)435 private void calculateImageFormat(int width, int height) { 436 mImageFormat = DeviceImageFormatHack.getImageFormat(); 437 } 438 } 439