| /external/jsilver/src/com/google/streamhtmlparser/util/ |
| D | CharacterRecorder.java | 21 * whether recording is currently enabled. 24 * supplied character to the recording buffer but only if 25 * recording is in progress. This is useful in our 27 * as the caller logic to enable/disable recording is decoupled from the logic 28 * of recording. 38 * <li>There is a size limit to the recording buffer as set in 41 * recording is currently enabled. 54 * This is where characters provided for recording are stored. Given 60 /** Holds whether we are currently recording characters or not. */ 61 private boolean recording; field in CharacterRecorder [all …]
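The doc comments above describe an append-if-recording buffer with a hard size cap: callers pass every character through, and the recorder decides whether to keep it, so enable/disable logic stays decoupled from the recording itself. A minimal sketch of that pattern (the real class is Java; the C++ names and the 256-character cap below are purely illustrative):

```cpp
#include <cstddef>
#include <string>

// Sketch only: append a character when recording is enabled and the buffer
// still has room, so callers never need to consult the recording state.
class CharacterRecorderSketch {
 public:
  void startRecording() { recording_ = true; }
  void stopRecording() { recording_ = false; }

  void maybeRecord(char c) {
    if (recording_ && buffer_.size() < kMaxLength) {
      buffer_.push_back(c);
    }
  }

  const std::string& content() const { return buffer_; }
  bool isRecording() const { return recording_; }

 private:
  static constexpr std::size_t kMaxLength = 256;  // illustrative cap
  bool recording_ = false;
  std::string buffer_;
};
```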
|
| /external/skia/tests/graphite/ |
| D | GraphitePromiseImageTest.cpp | 16 #include "include/gpu/graphite/Recording.h" 260 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 263 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 284 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 288 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 289 // testContext.fImg should still be fulfilled from the first time we inserted a Recording. in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 305 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 309 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 314 // fImg's proxy is reffed by the recording so, despite fImg being reset earlier, in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 315 // the imageRelease callback doesn't occur until the recording is deleted. in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() [all …]
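The flow these promise-image tests exercise is the core Graphite playback loop: snap a Recording from the Recorder, insert it into the Context, then submit. A hedged sketch of that loop using the public Graphite types (the helper name `flushRecorder` and the error handling are illustrative, not taken from the test):

```cpp
#include <memory>

#include "include/gpu/graphite/Context.h"
#include "include/gpu/graphite/GraphiteTypes.h"
#include "include/gpu/graphite/Recorder.h"
#include "include/gpu/graphite/Recording.h"

// Snap whatever has been recorded so far and play it back on the Context.
bool flushRecorder(skgpu::graphite::Context* context,
                   skgpu::graphite::Recorder* recorder) {
    // snap() packages the pending work into an immutable Recording.
    std::unique_ptr<skgpu::graphite::Recording> recording = recorder->snap();
    if (!recording) {
        return false;
    }

    // insertRecording() schedules the Recording's commands; the Recording must
    // stay alive at least until this call (the tests keep it alive longer so
    // the promise-image release callbacks fire at a predictable point).
    skgpu::graphite::InsertRecordingInfo info;
    info.fRecording = recording.get();
    if (!context->insertRecording(info)) {
        return false;
    }
    return context->submit();
}
```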
|
| D | GraphiteYUVAPromiseImageTest.cpp | 16 #include "include/gpu/graphite/Recording.h" 298 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 302 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 327 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 332 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 333 // testContext.fImg should still be fulfilled from the first time we inserted a Recording. in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 351 std::unique_ptr<Recording> recording = testContext.fRecorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() local 356 REPORTER_ASSERT(reporter, context->insertRecording({ recording.get() })); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 362 // fImg's proxy is reffed by the recording so, despite fImg being reset earlier, in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() 363 // the imageRelease callback doesn't occur until the recording is deleted. in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_RENDERING_CONTEXTS() [all …]
|
| D | ImageWrapTextureMipmapsTest.cpp | 57 std::unique_ptr<Recording> recording = recorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() local 60 std::unique_ptr<Recording>(static_cast<Recording*>(context)); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 65 recordingInfo.fRecording = recording.get(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 66 recordingInfo.fFinishedContext = recording.release(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 68 ERRORF(reporter, "Could not insert recording"); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 117 recording = recorder->snap(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 118 recordingInfo.fRecording = recording.get(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 119 recordingInfo.fFinishedContext = recording.release(); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS() 121 ERRORF(reporter, "Could not insert recording"); in DEF_CONDITIONAL_GRAPHITE_TEST_FOR_ALL_CONTEXTS()
|
| D | ComputeTest.cpp | 13 #include "include/gpu/graphite/Recording.h" 78 std::unique_ptr<Recording> submit_recording(Context* context, in submit_recording() 81 std::unique_ptr<Recording> recording = recorder->snap(); in submit_recording() local 82 if (!recording) { in submit_recording() 87 insertInfo.fRecording = recording.get(); in submit_recording() 91 return recording; in submit_recording() 221 std::unique_ptr<Recording> recording = recorder->snap(); in DEF_GRAPHITE_TEST_FOR_DAWN_AND_METAL_CONTEXTS() local 222 if (!recording) { in DEF_GRAPHITE_TEST_FOR_DAWN_AND_METAL_CONTEXTS() 223 ERRORF(reporter, "Failed to make recording"); in DEF_GRAPHITE_TEST_FOR_DAWN_AND_METAL_CONTEXTS() 228 insertInfo.fRecording = recording.get(); in DEF_GRAPHITE_TEST_FOR_DAWN_AND_METAL_CONTEXTS() [all …]
|
| D | RecordingOrderTest.cpp | 13 #include "include/gpu/graphite/Recording.h" 50 // Set up a recording to clear the surface in run_test() 52 std::unique_ptr<Recording> clearRecording = recorder->snap(); in run_test() 54 ERRORF(reporter, "Recording creation failed"); in run_test() 58 // Draw some text and get recording in run_test() 70 std::unique_ptr<Recording> text0Recording = recorder->snap(); in run_test() 72 // Draw some more text and get recording in run_test() 77 std::unique_ptr<Recording> text1Recording = recorder->snap(); in run_test()
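The point of this test is ordering: each snap() closes out the work recorded so far, and the resulting Recordings must be inserted in the same order they were snapped. A rough sketch of that sequence (drawing code elided; `insert` and `replayInOrder` are illustrative helper names, not part of the test):

```cpp
#include <memory>

#include "include/gpu/graphite/Context.h"
#include "include/gpu/graphite/GraphiteTypes.h"
#include "include/gpu/graphite/Recorder.h"
#include "include/gpu/graphite/Recording.h"

namespace {

void insert(skgpu::graphite::Context* context, skgpu::graphite::Recording* r) {
    skgpu::graphite::InsertRecordingInfo info;
    info.fRecording = r;
    context->insertRecording(info);
}

}  // namespace

void replayInOrder(skgpu::graphite::Context* context,
                   skgpu::graphite::Recorder* recorder) {
    // ... clear the surface ...
    std::unique_ptr<skgpu::graphite::Recording> clearRec = recorder->snap();
    // ... draw the first batch of text ...
    std::unique_ptr<skgpu::graphite::Recording> text0Rec = recorder->snap();
    // ... draw the second batch of text ...
    std::unique_ptr<skgpu::graphite::Recording> text1Rec = recorder->snap();

    // Insertion order must match snap order for the draws to layer correctly.
    insert(context, clearRec.get());
    insert(context, text0Rec.get());
    insert(context, text1Rec.get());
    context->submit();
}
```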
|
| /external/autotest/server/brillo/feedback/ |
| D | closed_loop_audio_client.py | 18 # Constants used when recording playback. 25 # Recording sample rate (48kHz). 27 # Recording sample format is signed 16-bit PCM (two bytes). 29 # Default frequency to generate audio at (used for recording). 32 # The peak when recording silence is 5% of the max volume. 51 This class (and the queries it instantiates) perform playback and recording 127 """Waits for recording to finish and copies the file to the host. 131 @raise error.TestError: Error while validating the recording. 133 # Wait for recording to finish. 138 'Recording did not terminate within %d seconds' % timeout) [all …]
|
| /external/skia/include/core/ |
| D | SkPictureRecorder.h | 38 @param bounds the cull rect used when recording this picture. Any drawing that falls outside 41 @param recordFlags optional flags that control recording. 53 /** Returns the recording canvas if one is active, or NULL if recording is 59 * Signal that the caller is done recording. This invalidates the canvas returned by 63 * The returned picture is immutable. If during recording drawables were added to the canvas, 64 * these will have been "drawn" into a recording canvas, so that this resulting picture will 71 * Signal that the caller is done recording, and update the cull rect to use for bounding 82 * Signal that the caller is done recording. This invalidates the canvas returned by 87 * may contain live references to other drawables (if they were added to the recording canvas) 97 canvas. This call doesn't close the current recording.
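The begin/finish pair these comments document is the essential SkPictureRecorder surface. A short sketch of the usual usage, assuming the standard Skia core headers (`recordCircle` and the drawing itself are illustrative):

```cpp
#include "include/core/SkCanvas.h"
#include "include/core/SkColor.h"
#include "include/core/SkPaint.h"
#include "include/core/SkPicture.h"
#include "include/core/SkPictureRecorder.h"
#include "include/core/SkRect.h"
#include "include/core/SkRefCnt.h"

sk_sp<SkPicture> recordCircle() {
    SkPictureRecorder recorder;
    // beginRecording() hands back the recording canvas; the cull rect bounds
    // the drawing expected to end up in the picture.
    SkCanvas* canvas = recorder.beginRecording(SkRect::MakeWH(256, 256));

    SkPaint paint;
    paint.setColor(SK_ColorBLUE);
    canvas->drawCircle(128, 128, 64, paint);

    // finishRecordingAsPicture() invalidates the canvas above and returns an
    // immutable SkPicture containing the recorded commands.
    return recorder.finishRecordingAsPicture();
}
```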
|
| /external/skia/include/gpu/graphite/ |
| D | GraphiteTypes.h | 27 class Recording; variable 38 * The fFinishedProc is called when the Recording has been submitted and finished on the GPU, or 41 * the Recording that they may be holding onto. If the Recording is successfully submitted to the 46 * information about execution of the recording on the GPU. Only the stats requested using 53 * the Recording contains any such draws. It must be Graphite-backed and its backing texture's 62 * command stream. At some time before issuing commands in the Recording, the fWaitSemaphores will 64 * shader work. Similarly, at some time after issuing the Recording's commands, the 66 * and signal operations will either be immediately before or after the given Recording's command 75 Recording* fRecording = nullptr; 94 * The fFinishedProc is called when the Recording has been submitted and finished on the GPU, or [all …]
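The fields discussed here come together when inserting a Recording: fRecording names the work to play back, and fFinishedProc/fFinishedContext give the caller a hook to free resources once the GPU is done with the work. A hedged sketch of that usage, following the release-into-fFinishedContext pattern visible in ImageWrapTextureMipmapsTest above (`insertWithCallback` is an illustrative name, and the exact callback semantics on failure are not shown in the excerpt):

```cpp
#include <memory>

#include "include/gpu/graphite/Context.h"
#include "include/gpu/graphite/GraphiteTypes.h"
#include "include/gpu/graphite/Recording.h"

// Insert a Recording and let the finished callback reclaim it once the GPU
// work completes (or the Recording is abandoned). Ownership is handed to
// fFinishedContext, mirroring the fFinishedContext = recording.release()
// pattern in the mipmap test above.
bool insertWithCallback(skgpu::graphite::Context* context,
                        std::unique_ptr<skgpu::graphite::Recording> recording) {
    skgpu::graphite::InsertRecordingInfo info;
    info.fRecording = recording.get();
    info.fFinishedProc = [](skgpu::graphite::GpuFinishedContext ctx,
                            skgpu::CallbackResult) {
        // Safe point to free the Recording and anything it pinned.
        delete static_cast<skgpu::graphite::Recording*>(ctx);
    };
    info.fFinishedContext = recording.release();
    return context->insertRecording(info);
}
```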
|
| /external/caliper/caliper/src/main/java/com/google/caliper/worker/ |
| D | AggregateAllocationsRecorder.java | 33 private volatile boolean recording = false; field in AggregateAllocationsRecorder 38 if (recording) { 50 checkState(!recording, "startRecording called, but we were already recording."); in doStartRecording() 53 recording = true; in doStartRecording() 57 checkState(recording, "stopRecording called, but we were not recording."); in stopRecording() 58 recording = false; in stopRecording()
|
| D | AllAllocationsRecorder.java | 35 private volatile boolean recording = false; field in AllAllocationsRecorder 41 if (recording) { 80 checkState(!recording, "startRecording called, but we were already recording."); in doStartRecording() 82 recording = true; in doStartRecording() 86 checkState(recording, "stopRecording called, but we were not recording."); in stopRecording() 87 recording = false; in stopRecording()
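Both recorders guard their transitions the same way: start must find recording off, stop must find it on, and the hot allocation path only samples the flag. A hedged C++ rendering of that guard pattern (the originals are Java and use Guava's checkState; assert() stands in here and all names are illustrative):

```cpp
#include <atomic>
#include <cassert>

class AllocationRecorderSketch {
 public:
  void startRecording() {
    assert(!recording_ && "startRecording called, but we were already recording.");
    recording_ = true;
  }

  void stopRecording() {
    assert(recording_ && "stopRecording called, but we were not recording.");
    recording_ = false;
  }

  // Queried on the allocation callback path; only count while recording.
  bool isRecording() const { return recording_; }

 private:
  std::atomic<bool> recording_{false};  // mirrors the volatile boolean fields above
};
```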
|
| /external/jetpack-camera-app/data/settings/src/main/java/com/google/jetpackcamera/settings/model/ |
| D | CaptureMode.kt | 28 * Hold the Capture button to start recording, and release to complete the recording. 35 * Tap the Capture Button to start recording. 36 * Hold the Capture button to start recording; releasing will not stop the recording. 38 * Tap the capture button again after recording has started to complete the recording.
|
| /external/pytorch/aten/src/ATen/native/vulkan/api/ |
| D | Command.cpp | 59 state_ = CommandBuffer::State::RECORDING; in begin() 64 state_ == CommandBuffer::State::RECORDING || in end() 67 "is not RECORDING or SUBMITTED."); in end() 69 if (state_ == CommandBuffer::State::RECORDING) { in end() 80 state_ == CommandBuffer::State::RECORDING, in bind_pipeline() 82 "is not RECORDING."); in bind_pipeline() 122 state_ == CommandBuffer::State::RECORDING, in insert_barrier() 124 "is not DESCRIPTORS_BOUND or RECORDING."); in insert_barrier() 178 state_ = CommandBuffer::State::RECORDING; in dispatch() 201 state_ = CommandBuffer::State::RECORDING; in copy_buffer_to_buffer() [all …]
|
| /external/libchrome/base/android/java/src/org/chromium/base/ |
| D | AnimationFrameTimeHistogram.java | 16 * any jankiness of short Chrome Android animations. It is limited to a few seconds of recording. 62 * Start recording frame times. The recording can fail if it exceeds a few seconds. 69 * End recording and save it to histogram. It won't save the histogram if the recording wasn't 102 * @return Whether the recording was successful. If successful, the result is available via 121 * the recording and getting the result. 132 Log.w(TAG, "Animation frame time recording reached the maximum number. It's either" in onTimeUpdate() 133 + "the animation took too long or recording end is not called."); in onTimeUpdate()
|
| /external/executorch/backends/vulkan/runtime/vk_api/ |
| D | Command.cpp | 65 state_ = CommandBuffer::State::RECORDING; in begin() 70 state_ == CommandBuffer::State::RECORDING || in end() 73 "is not RECORDING or SUBMITTED."); in end() 75 if (state_ == CommandBuffer::State::RECORDING) { in end() 86 state_ == CommandBuffer::State::RECORDING, in bind_pipeline() 88 "is not RECORDING."); in bind_pipeline() 128 state_ == CommandBuffer::State::RECORDING, in insert_barrier() 130 "is not DESCRIPTORS_BOUND or RECORDING."); in insert_barrier() 179 state_ = CommandBuffer::State::RECORDING; in dispatch() 218 state_ = CommandBuffer::State::RECORDING; in blit() [all …]
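Both copies of Command.cpp (the ATen/Vulkan one above and this ExecuTorch one) drive the same small state machine: begin() puts the buffer into RECORDING, each recording-time call checks it is in an acceptable state, and work-emitting calls drop it back to RECORDING. A hedged sketch of those checks (the NEW and PIPELINE_BOUND states and the assert-based error handling are illustrative; the real code uses its own check macros and more states):

```cpp
#include <cassert>

// Illustrative only: a trimmed-down version of the state checks.
class CommandBufferSketch {
 public:
  enum class State { NEW, RECORDING, PIPELINE_BOUND, DESCRIPTORS_BOUND, SUBMITTED };

  void begin() {
    // vkBeginCommandBuffer(...) would be issued here.
    state_ = State::RECORDING;
  }

  void bind_pipeline() {
    assert(state_ == State::RECORDING &&
           "bind_pipeline called, but the command buffer is not RECORDING.");
    // vkCmdBindPipeline(...) would be issued here.
    state_ = State::PIPELINE_BOUND;
  }

  void dispatch() {
    assert(state_ == State::PIPELINE_BOUND || state_ == State::DESCRIPTORS_BOUND);
    // vkCmdDispatch(...) would be issued here; afterwards more work may be
    // recorded, so the buffer returns to RECORDING.
    state_ = State::RECORDING;
  }

  void end() {
    assert((state_ == State::RECORDING || state_ == State::SUBMITTED) &&
           "end called, but the command buffer is not RECORDING or SUBMITTED.");
    // vkEndCommandBuffer(...) would be issued here.
  }

 private:
  State state_ = State::NEW;
};
```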
|
| /external/fonttools/Lib/fontTools/pens/ |
| D | recordingPen.py | 1 """Pen recording operations that can be accessed or replayed.""" 16 def replayRecording(recording, pen): argument 17 """Replay a recording, as produced by RecordingPen or DecomposingRecordingPen, 20 Note that recording does not have to be produced by those pens. 24 for operator, operands in recording: 29 """Pen recording operations that can be accessed or replayed. 31 The recording can be accessed as pen.value; or replayed using 114 """PointPen recording operations that can be accessed or replayed. 116 The recording can be accessed as pen.value; or replayed using 181 Factor is typically between 0 and 1. 0 means the first recording, [all …]
|
| /external/googleapis/google/apps/meet/v2/ |
| D | resource.proto | 243 // Metadata about a recording created during a conference. 244 message Recording { message 246 type: "meet.googleapis.com/Recording" 247 pattern: "conferenceRecords/{conference_record}/recordings/{recording}" 249 singular: "recording" 252 // Current state of the recording session. 257 // An active recording session has started. 260 // This recording session has ended, but the recording file hasn't been 264 // Recording file is generated and ready to download. 269 // Output only. Recording is saved to Google Drive as an MP4 file. The [all …]
|
| /external/autotest/client/site_tests/video_AVAnalysis/ |
| D | video_AVAnalysis.py | 24 The recording will be carried out by a recording server connected 26 The recording will then be uploaded to Google Cloud storage and analyzed 79 This method uses DUT IP to calculate IP of recording server. Note that 100 """Starts recording on recording server. 102 Makes an http POST request to the recording server to start 103 recording and processes the response. The body of the post 120 'Recording server failed with response: ({}, {})'.format( 139 """Plays video on DUT for recording & analysis."""
|
| /external/perfetto/docs/quickstart/ |
| D | chrome-tracing.md | 5 > To record traces from Chrome on Android, follow the [instructions for recording Android system tr… 9 ## Recording a trace 16 3. Configure settings in **"Recording settings"**. 25 > - To share your config settings go to the "Recording command" menu. 29 …o "Android" and then see the categories in the generated config in the "Recording Command" section… 34 5. Now you can start the trace recording. Press the **"Start recording"** button when ready.
|
| /external/googleapis/google/apps/meet/v2beta/ |
| D | resource.proto | 249 // Metadata about a recording created during a conference. 250 message Recording { message 252 type: "meet.googleapis.com/Recording" 253 pattern: "conferenceRecords/{conference_record}/recordings/{recording}" 255 singular: "recording" 258 // Current state of the recording session. 263 // An active recording session has started. 266 // This recording session has ended, but the recording file hasn't been 270 // Recording file is generated and ready to download. 275 // Output only. Recording is saved to Google Drive as an mp4 file. The [all …]
|
| /external/skia/src/gpu/graphite/ |
| D | Recording.cpp | 8 #include "include/gpu/graphite/Recording.h" 29 Recording::Recording(uint32_t uniqueID, in Recording() function in skgpu::graphite::Recording 43 Recording::~Recording() { in ~Recording() 48 std::size_t Recording::ProxyHash::operator()(const sk_sp<TextureProxy> &proxy) const { in operator ()() 52 Recording::LazyProxyData::LazyProxyData(const Caps* caps, in LazyProxyData() 76 TextureProxy* Recording::LazyProxyData::lazyProxy() { return fTargetProxy.get(); } in lazyProxy() 78 sk_sp<TextureProxy> Recording::LazyProxyData::refLazyProxy() { return fTargetProxy; } in refLazyProxy() 80 bool Recording::LazyProxyData::lazyInstantiate(ResourceProvider* resourceProvider, in lazyInstantiate() 202 // is discarded, the Recording will automatically be a no-op on replay while still correctly in addCommands()
|
| D | Recorder.cpp | 18 #include "include/gpu/graphite/Recording.h" 135 // Any finished procs that haven't been passed to a Recording fail in ~Recorder() 155 std::unique_ptr<Recording> Recorder::snap() { in snap() 167 // Collect all pending tasks on the deferred recording canvas and any other tracked device. in snap() 171 // data cache so that they can be instantiated easily when the Recording is inserted. in snap() 172 std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> nonVolatileLazyProxies; in snap() 173 std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> volatileLazyProxies; in snap() 191 std::unique_ptr<Recording> recording(new Recording(fNextRecordingID++, in snap() local 198 // before moving the root task list to the Recording. in snap() 199 bool valid = fDrawBufferManager->transferToRecording(recording.get()); in snap() [all …]
|
| /external/armnn/python/pyarmnn/examples/common/ |
| D | audio_capture.py | 69 """Sets a time duration (in integer seconds) for recording audio. Total time duration is 71 result in endless recording. 74 duration (int): User-provided command line argument for time duration of recording. 82 print(f"Setting minimum recording duration...") 85 print(f"Recording duration is {duration} seconds") 97 """3 second countdown prior to recording audio.""" 98 print("Beginning recording in...") 138 Update counter if recording duration is finite.
|
| /external/jetpack-camera-app/core/camera/src/main/java/com/google/jetpackcamera/core/camera/ |
| D | VideoCaptureControlEvent.kt | 26 * Starts video recording. 28 * @param onVideoRecord Callback to handle video recording events. 38 * Pauses a video recording. 43 * Resumes a paused video recording. 48 * Stops video recording.
|
| /external/oboe/samples/LiveEffect/src/main/cpp/ |
| D | LiveEffectEngine.cpp | 63 * The playback stream must be closed before the recording stream. If the in closeStreams() 64 * recording stream were to be closed first the playback stream's in closeStreams() 65 * callback may attempt to read from the recording stream in closeStreams() 66 * which would cause the app to crash since the recording stream would be in closeStreams() 78 // (e.g. sample rate) to create the recording stream. By matching the in openStreams() 110 * Sets the stream parameters which are specific to recording, 114 * @param builder The recording stream builder 115 * @param sampleRate The desired sample rate of the recording stream 145 * Set the stream parameters which are common to both recording and playback 147 * @param builder The playback or recording stream builder
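The comments spell out two constraints: the recording (input) stream is configured from the playback stream's properties, and the playback stream must be closed first because its callback reads from the recording stream. A hedged sketch of both using the public Oboe API (class and member names are illustrative; callbacks and the actual effect processing are omitted):

```cpp
#include <memory>

#include <oboe/Oboe.h>

class LiveEffectSketch {
 public:
  oboe::Result openStreams() {
    // Open the playback (output) stream first and reuse its negotiated
    // parameters for the recording (input) stream so the two match.
    oboe::AudioStreamBuilder outBuilder;
    outBuilder.setDirection(oboe::Direction::Output);
    outBuilder.setPerformanceMode(oboe::PerformanceMode::LowLatency);
    oboe::Result result = outBuilder.openStream(mPlayStream);
    if (result != oboe::Result::OK) {
      return result;
    }

    oboe::AudioStreamBuilder inBuilder;
    inBuilder.setDirection(oboe::Direction::Input);
    inBuilder.setSampleRate(mPlayStream->getSampleRate());
    inBuilder.setFormat(mPlayStream->getFormat());
    return inBuilder.openStream(mRecordingStream);
  }

  void closeStreams() {
    // Playback first: its callback reads from the recording stream, so the
    // recording stream has to outlive it.
    if (mPlayStream) {
      mPlayStream->close();
      mPlayStream.reset();
    }
    if (mRecordingStream) {
      mRecordingStream->close();
      mRecordingStream.reset();
    }
  }

 private:
  std::shared_ptr<oboe::AudioStream> mPlayStream;
  std::shared_ptr<oboe::AudioStream> mRecordingStream;
};
```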
|