
Searched refs:camera (Results 1 – 25 of 227) sorted by relevance


/external/robolectric-shadows/robolectric/src/test/java/org/robolectric/shadows/
ShadowCameraTest.java
23 private Camera camera; field in ShadowCameraTest
28 camera = Camera.open(); in setUp()
29 shadowCamera = Shadows.shadowOf(camera); in setUp()
39 assertThat(camera).isNotNull(); in testOpen()
45 camera = Camera.open(12); in testOpenWithId()
46 assertThat(camera).isNotNull(); in testOpenWithId()
53 camera.unlock(); in testUnlock()
59 camera.unlock(); in testReconnect()
61 camera.reconnect(); in testReconnect()
67 Camera.Parameters parameters = camera.getParameters(); in testGetParameters()
[all …]
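
Taken together, these matches show the usual Robolectric pattern: call the real android.hardware.Camera API, let ShadowCamera intercept it, and inspect the fake through Shadows.shadowOf. A minimal sketch under those assumptions (the test class name and the single assertion are illustrative, not the file's actual contents):

import static com.google.common.truth.Truth.assertThat;

import android.hardware.Camera;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.Shadows;
import org.robolectric.shadows.ShadowCamera;

@RunWith(RobolectricTestRunner.class)
public class CameraShadowSketchTest {
  private Camera camera;
  private ShadowCamera shadowCamera;

  @Before
  public void setUp() {
    // Under Robolectric, Camera.open() never touches hardware; ShadowCamera
    // hands back a fake instance that the test can inspect.
    camera = Camera.open();
    shadowCamera = Shadows.shadowOf(camera);
  }

  @Test
  public void open_returnsNonNullCamera() {
    assertThat(camera).isNotNull();
  }
}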
/external/sl4a/Common/src/com/googlecode/android_scripting/facade/
CameraFacade.java
78 Camera camera = openCamera(0); in CameraFacade() local
80 mParameters = camera.getParameters(); in CameraFacade()
82 camera.release(); in CameraFacade()
94 Camera camera = openCamera(cameraId); in cameraCapturePicture() local
95 camera.setParameters(mParameters); in cameraCapturePicture()
98 Method method = camera.getClass().getMethod("setDisplayOrientation", int.class); in cameraCapturePicture()
99 method.invoke(camera, 90); in cameraCapturePicture()
105 FutureActivityTask<SurfaceHolder> previewTask = setPreviewDisplay(camera); in cameraCapturePicture()
106 camera.startPreview(); in cameraCapturePicture()
108 autoFocus(autoFocusResult, camera); in cameraCapturePicture()
[all …]
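
One detail worth noting in the CameraFacade matches is that setDisplayOrientation is invoked through reflection, presumably so the facade still loads on API levels where the method is missing. A hedged sketch of just that step (the helper class and method name below are made up for illustration):

import android.hardware.Camera;
import java.lang.reflect.Method;

public final class CameraOrientationHelper {
  private CameraOrientationHelper() {}

  // Mirrors the reflective call in the excerpt: rotate the preview 90 degrees
  // if setDisplayOrientation exists, and quietly do nothing otherwise.
  public static void rotatePreviewToPortrait(Camera camera) {
    try {
      Method method = camera.getClass().getMethod("setDisplayOrientation", int.class);
      method.invoke(camera, 90);
    } catch (ReflectiveOperationException e) {
      // Older API levels keep the default landscape orientation.
    }
  }
}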
/external/tensorflow/tensorflow/examples/android/src/org/tensorflow/demo/
LegacyCameraConnectionFragment.java
40 private Camera camera; field in LegacyCameraConnectionFragment
80 camera = Camera.open(index);
83 Camera.Parameters parameters = camera.getParameters();
99 camera.setDisplayOrientation(90);
100 camera.setParameters(parameters);
101 camera.setPreviewTexture(texture);
103 camera.release();
106 camera.setPreviewCallbackWithBuffer(imageListener);
107 Camera.Size s = camera.getParameters().getPreviewSize();
108 camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
[all …]
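
Pieced together, these matches are the standard legacy android.hardware.Camera preview flow: open the camera, configure parameters, attach a SurfaceTexture, and hand the driver a preallocated callback buffer. A minimal sketch of that flow, assuming the NV21 preview format (the demo's ImageUtils.getYUVByteSize is replaced by the standard width * height * 3 / 2 size):

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import java.io.IOException;

public class LegacyPreviewSketch {
  private Camera camera;

  /** Opens the given camera and starts a buffered NV21 preview. */
  public void start(int index, SurfaceTexture texture,
                    Camera.PreviewCallback imageListener) throws IOException {
    camera = Camera.open(index);
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPreviewFormat(ImageFormat.NV21);
    camera.setDisplayOrientation(90);
    camera.setParameters(parameters);
    camera.setPreviewTexture(texture);

    // Preallocate one callback buffer sized for NV21: 12 bits per pixel.
    Camera.Size s = camera.getParameters().getPreviewSize();
    camera.addCallbackBuffer(new byte[s.width * s.height * 3 / 2]);
    camera.setPreviewCallbackWithBuffer(imageListener);
    camera.startPreview();
  }

  /** Stops the preview and releases the hardware. */
  public void stop() {
    if (camera != null) {
      camera.stopPreview();
      camera.release();
      camera = null;
    }
  }
}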
/external/tensorflow/tensorflow/lite/examples/android/app/src/main/java/org/tensorflow/demo/
LegacyCameraConnectionFragment.java
40 private Camera camera; field in LegacyCameraConnectionFragment
80 camera = Camera.open(index);
83 Camera.Parameters parameters = camera.getParameters();
99 camera.setDisplayOrientation(90);
100 camera.setParameters(parameters);
101 camera.setPreviewTexture(texture);
103 camera.release();
106 camera.setPreviewCallbackWithBuffer(imageListener);
107 Camera.Size s = camera.getParameters().getPreviewSize();
108 camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
[all …]
/external/zxing/qr_scanner/src/com/google/zxing/client/android/camera/
CameraManager.java
17 package com.google.zxing.client.android.camera;
51 private Camera camera; field in CameraManager
81 Camera theCamera = camera; in openDriver()
87 camera = theCamera; in openDriver()
110 if (camera != null) { in closeDriver()
111 camera.release(); in closeDriver()
112 camera = null; in closeDriver()
124 Camera theCamera = camera; in startPreview()
135 if (camera != null && previewing) { in stopPreview()
136 camera.stopPreview(); in stopPreview()
[all …]
CameraConfigurationManager.java
17 package com.google.zxing.client.android.camera;
51 void initFromCameraParameters(Camera camera) { in initFromCameraParameters() argument
52 Camera.Parameters parameters = camera.getParameters(); in initFromCameraParameters()
71 void setDesiredCameraParameters(Camera camera) { in setDesiredCameraParameters() argument
72 Camera.Parameters parameters = camera.getParameters(); in setDesiredCameraParameters()
90 camera.setParameters(parameters); in setDesiredCameraParameters()
101 void setTorch(Camera camera, boolean newSetting) { in setTorch() argument
102 Camera.Parameters parameters = camera.getParameters(); in setTorch()
104 camera.setParameters(parameters); in setTorch()
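
The CameraConfigurationManager matches indicate that torch control goes entirely through Camera.Parameters: read them, flip the flash mode, write them back. A hedged sketch of what such a setTorch helper plausibly does (the real zxing code negotiates several flash modes; this simplified version is an assumption, not the library's implementation):

import android.hardware.Camera;
import java.util.List;

final class TorchSketch {
  private TorchSketch() {}

  /** Turns the torch on or off if the device advertises the flash mode. */
  static void setTorch(Camera camera, boolean newSetting) {
    Camera.Parameters parameters = camera.getParameters();
    List<String> modes = parameters.getSupportedFlashModes();
    String desired = newSetting
        ? Camera.Parameters.FLASH_MODE_TORCH
        : Camera.Parameters.FLASH_MODE_OFF;
    if (modes != null && modes.contains(desired)) {
      parameters.setFlashMode(desired);
      camera.setParameters(parameters);
    }
  }
}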
/external/autotest/client/common_lib/cros/manual/
video_helper.py
73 def check_v4l2_interface(dut, vidpid, camera): argument
82 logging.info('---check v4l2 interface for %s', camera)
83 if get_video_by_name(dut, camera):
85 return False, '{} have no v4l2 interface.'.format(camera)
88 def check_video_stream(dut, is_muted, vidpid, camera): argument
100 process_camera = get_video_streams(dut, camera)
103 return False, '{} fails to stop video streaming.'.format(camera)
106 return False, '{} fails to start video streaming.'.format(camera)
/external/webrtc/talk/app/webrtc/java/android/org/webrtc/
VideoCapturerAndroid.java
72 private android.hardware.Camera camera; // Only non-null while capturing. field in VideoCapturerAndroid
113 public void onError(int error, android.hardware.Camera camera) {
259 if (camera == null) { in switchCamera()
377 if (camera != null) { in release()
420 if (camera != null) { in startCaptureOnCameraThread()
434 camera = android.hardware.Camera.open(id); in startCaptureOnCameraThread()
456 camera.setPreviewTexture(surfaceHelper.getSurfaceTexture()); in startCaptureOnCameraThread()
464 camera.setErrorCallback(cameraErrorCallback); in startCaptureOnCameraThread()
488 if (camera == null) { in startPreviewOnCameraThread()
498 final android.hardware.Camera.Parameters parameters = camera.getParameters(); in startPreviewOnCameraThread()
[all …]
CameraEnumerator.java
63 android.hardware.Camera camera = null; in enumerateFormats() local
66 camera = android.hardware.Camera.open(cameraId); in enumerateFormats()
67 parameters = camera.getParameters(); in enumerateFormats()
72 if (camera != null) { in enumerateFormats()
73 camera.release(); in enumerateFormats()
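
CameraEnumerator shows the defensive probe pattern: open each camera just long enough to read its parameters and always release it in a finally block. A minimal sketch of that pattern that only collects supported preview sizes (the real enumerator converts them into WebRTC capture formats, which is omitted here):

import android.hardware.Camera;
import java.util.Collections;
import java.util.List;

final class CameraFormatProbe {
  private CameraFormatProbe() {}

  /** Returns the supported preview sizes of cameraId, or an empty list on failure. */
  static List<Camera.Size> probePreviewSizes(int cameraId) {
    Camera camera = null;
    try {
      camera = Camera.open(cameraId);
      Camera.Parameters parameters = camera.getParameters();
      return parameters.getSupportedPreviewSizes();
    } catch (RuntimeException e) {
      // Camera.open throws RuntimeException when the camera is in use or absent.
      return Collections.emptyList();
    } finally {
      if (camera != null) {
        camera.release();
      }
    }
  }
}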
/external/dynamic_depth/internal/dynamic_depth/
cameras.cc
24 for (const auto& camera : camera_list_) { in GetNamespaces() local
25 camera->GetNamespaces(ns_name_href_map); in GetNamespaces()
48 std::unique_ptr<Camera> camera = Camera::FromDeserializer(*deserializer); in FromDeserializer() local
49 if (camera == nullptr) { in FromDeserializer()
53 cameras->camera_list_.emplace_back(std::move(camera)); in FromDeserializer()
66 for (const auto& camera : camera_list_) { in GetCameras() local
67 camera_list.push_back(camera.get()); in GetCameras()
/external/autotest/client/cros/camera/
camera_utils.py
15 cameras = [os.path.basename(camera) for camera in
19 camera = cameras[0]
20 return camera, int(camera[5:])
/external/autotest/client/site_tests/camera_V4L2/
control.certification
7 PURPOSE = 'Exercises v4l2 camera devices to verify required operations.'
9 This is used for third-party lab to verify new camera modules.
12 - No camera configuration file is found
20 ATTRIBUTES = "suite:usb-camera"
34 This is used for third-party lab to verify camera module. The camera module is
35 an external camera device to run all test cases.
/external/autotest/server/site_tests/enterprise_CFM_Aver520Updater/
enterprise_CFM_Aver520Updater.py
47 def initialize(self, host, camera): argument
57 self.camera = camera
69 self.vid_pid = self.camera.vid_pid
70 self.usb_spec = self.camera.get_usb_device_spec(self.vid_pid)
218 vid = self.camera.vendor_id
219 pid = self.camera.product_id
224 '{}'.format(self.camera))
249 wait_time, self.camera)
379 self.camera)
401 'test was successful', self.camera)
/external/tensorflow/tensorflow/contrib/pi_examples/
README.md
44 reads frames from a camera attached to the Pi. You'll need to install and set up your
45 camera module first. The example uses Video4Linux, so you'll need to install that first.
47 at this blog post: http://www.richardmudhar.com/blog/2015/02/raspberry-pi-camera-and-motion-out-of-…
54 Once that's working, run the following commands to build and run the camera example:
57 make -f tensorflow/contrib/pi_examples/camera/Makefile
58 tensorflow/contrib/pi_examples/camera/gen/bin/camera
61 You should see it looping over camera frames as they come in, and printing the top labels
72 tensorflow/contrib/pi_examples/camera/gen/bin/camera | xargs -n 1 flite -t
/external/skqp/platform_tools/android/apps/skar_java/src/main/java/com/google/skar/examples/helloskar/app/
HelloCanvasAR.java
328 Camera camera = frame.getCamera(); in onDrawFrame() local
331 handleSingleTaps(frame, camera); in onDrawFrame()
338 if (camera.getTrackingState() == TrackingState.PAUSED) { in onDrawFrame()
344 camera.getProjectionMatrix(projMatrix, 0, 0.1f, 100.0f); in onDrawFrame()
349 camera.getViewMatrix(viewMatrix, 0); in onDrawFrame()
357 handleHoldTaps(frame, camera); in onDrawFrame()
383 drawPlanes(canvas, camera); in onDrawFrame()
410 private void handleSingleTaps(Frame frame, Camera camera) { in handleSingleTaps() argument
412 if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) { in handleSingleTaps()
419 && (DrawManager.calculateDistanceToPlane(hit.getHitPose(), camera.getPose()) in handleSingleTaps()
[all …]
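
In this file (and its copy under /external/skia/ below) Camera is ARCore's com.google.ar.core.Camera rather than the hardware class. The per-frame pattern visible in the matches is: check the tracking state, then pull the projection and view matrices. A hedged sketch of that step in isolation (the 0.1f/100.0f near and far planes are copied from the excerpt; the helper itself is hypothetical):

import com.google.ar.core.Camera;
import com.google.ar.core.Frame;
import com.google.ar.core.TrackingState;

final class ArMatrixSketch {
  private ArMatrixSketch() {}

  /**
   * Fills projMatrix and viewMatrix (both float[16]) from the current frame.
   * Returns false when tracking is paused and the matrices should not be used.
   */
  static boolean readMatrices(Frame frame, float[] projMatrix, float[] viewMatrix) {
    Camera camera = frame.getCamera();
    if (camera.getTrackingState() == TrackingState.PAUSED) {
      return false;
    }
    camera.getProjectionMatrix(projMatrix, 0, 0.1f, 100.0f);
    camera.getViewMatrix(viewMatrix, 0);
    return true;
  }
}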
/external/skia/platform_tools/android/apps/skar_java/src/main/java/com/google/skar/examples/helloskar/app/
HelloCanvasAR.java
328 Camera camera = frame.getCamera(); in onDrawFrame() local
331 handleSingleTaps(frame, camera); in onDrawFrame()
338 if (camera.getTrackingState() == TrackingState.PAUSED) { in onDrawFrame()
344 camera.getProjectionMatrix(projMatrix, 0, 0.1f, 100.0f); in onDrawFrame()
349 camera.getViewMatrix(viewMatrix, 0); in onDrawFrame()
357 handleHoldTaps(frame, camera); in onDrawFrame()
383 drawPlanes(canvas, camera); in onDrawFrame()
410 private void handleSingleTaps(Frame frame, Camera camera) { in handleSingleTaps() argument
412 if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) { in handleSingleTaps()
419 && (DrawManager.calculateDistanceToPlane(hit.getHitPose(), camera.getPose()) in handleSingleTaps()
[all …]
/external/eigen/demos/opengl/
README
5 left button + ctrl quake rotate (rotate around camera position)
6 middle button + ctrl walk (progress along camera's z direction)
7 left button: pan (translate in the XY camera's plane)
9 R : move the camera to initial position
/external/replicaisland/src/com/replica/replicaisland/
GhostComponent.java
62 final CameraSystem camera = sSystemRegistry.cameraSystem; in update() local
86 if (camera != null) { in update()
87 camera.setTarget(parentObject); in update()
161 final CameraSystem camera = sSystemRegistry.cameraSystem; in releaseControl() local
162 if (camera != null) { in releaseControl()
163 camera.setTarget(null); in releaseControl()
179 if (camera.pointVisible(player.getPosition(), player.width)) { in releaseControl()
SleeperComponent.java
66 CameraSystem camera = sSystemRegistry.cameraSystem; in update() local
69 …if (camera.shaking() && camera.pointVisible(parentObject.getPosition(), parentObject.width / 2.0f)… in update()
87 camera.shake(mSlamDuration, mSlamMagnitude); in update()
92 if (!camera.shaking()) { in update()
TheSourceComponent.java
63 CameraSystem camera = sSystemRegistry.cameraSystem; in update() local
68 camera.shake(SHAKE_TIME, CAMERA_HIT_SHAKE_MAGNITUDE); in update()
92 if (camera != null && manager != null && camera.getTarget() == manager.getPlayer()) { in update()
93 camera.setTarget(parentObject); in update()
CameraBiasComponent.java
32 CameraSystem camera = sSystemRegistry.cameraSystem; in update() local
33 if (camera != null) { in update()
34 camera.addCameraBias(parentObject.getPosition()); in update()
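
Across these Replica Island components the pattern is the same: fetch the shared CameraSystem from sSystemRegistry, then drive it with setTarget, shake, pointVisible, or addCameraBias, always after a null check. A heavily hedged sketch of that pattern (only CameraSystem and GameObject members visible in the matches are used; the helper class itself is hypothetical):

import com.replica.replicaisland.CameraSystem;
import com.replica.replicaisland.GameObject;

final class CameraShakeSketch {
  private CameraShakeSketch() {}

  // Shake the camera when the object is on screen and no shake is in progress,
  // mirroring the SleeperComponent logic in the matches above.
  static void shakeIfVisible(CameraSystem camera, GameObject object,
                             float duration, float magnitude) {
    if (camera == null) {
      return;
    }
    if (!camera.shaking()
        && camera.pointVisible(object.getPosition(), object.width / 2.0f)) {
      camera.shake(duration, magnitude);
    }
  }
}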
/external/tensorflow/tensorflow/lite/g3doc/models/image_classification/
ios.md
4 the iOS device camera. In this tutorial, you will download the demo application
17 * The demo app requires a camera and must be executed on a real iOS device.
19 camera information to classify.
59 cd tensorflow/lite/examples/ios/camera
85 a camera. Select the iOS device from the drop-down menu.
96 You'll have to grant permissions for the app to use the device's camera. Point
97 the camera at various objects and enjoy seeing how the model classifies things!
101 ### Get camera input
104 `tensorflow/lite/examples/ios/camera/CameraExampleViewController.mm`.
159 `tensorflow/lite/examples/ios/camera/CameraExampleViewController.mm`. It's a
[all …]
/external/tensorflow/tensorflow/lite/g3doc/guide/
ios.md
4 the iOS device camera. In this tutorial, you will download the demo application
17 * The demo app requires a camera and must be executed on a real iOS device.
19 camera information to classify.
59 cd tensorflow/lite/examples/ios/camera
85 camera. Select the iOS device from the drop-down menu.
96 You'll have to grant permissions for the app to use the device's camera. Point
97 the camera at various objects and enjoy seeing how the model classifies things!
101 ### Get camera input
104 `tensorflow/lite/examples/ios/camera/CameraExampleViewController.mm`.
159 `tensorflow/lite/examples/ios/camera/CameraExampleViewController.mm`. It's a
[all …]
/external/autotest/server/site_tests/enterprise_CFM_PTZStress/
control.PTZPro2
21 2. During meeting PTZ the camera according to the control file.
24 2. Verify PTZ signals are sent to the camera.
30 'camera': cfm_usb_devices.PTZ_PRO_2_CAMERA,
33 'usb_trace_path': '/tmp/camera.mon.out'
/external/robolectric-shadows/shadows/framework/src/main/java/org/robolectric/shadows/
ShadowCamera.java
51 Camera camera = newInstanceOf(Camera.class); in open() local
52 ShadowCamera shadowCamera = Shadow.extract(camera); in open()
54 return camera; in open()
60 Camera camera = newInstanceOf(Camera.class); in open() local
61 ShadowCamera shadowCamera = Shadow.extract(camera); in open()
63 return camera; in open()
176 public void invokeAutoFocusCallback(boolean success, Camera camera) { in invokeAutoFocusCallback() argument
183 autoFocusCallback.onAutoFocus(success, camera); in invokeAutoFocusCallback()
501 protected void __constructor__(Camera camera, int width, int height) { in __constructor__() argument
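
The invokeAutoFocusCallback method shown here is the hook tests use to simulate focus completion without hardware. A short sketch of how a test might drive it, assuming autoFocus() has already registered a callback with the shadow (the test class and assertion are illustrative):

import static com.google.common.truth.Truth.assertThat;

import android.hardware.Camera;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.Shadows;
import org.robolectric.shadows.ShadowCamera;

@RunWith(RobolectricTestRunner.class)
public class AutoFocusSketchTest {

  @Test
  public void autoFocusCallback_receivesSimulatedResult() {
    Camera camera = Camera.open();
    ShadowCamera shadowCamera = Shadows.shadowOf(camera);

    final boolean[] focused = {false};
    camera.autoFocus((success, cam) -> focused[0] = success);

    // Simulate the driver reporting a successful focus.
    shadowCamera.invokeAutoFocusCallback(true, camera);
    assertThat(focused[0]).isTrue();
  }
}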
