
Searched full:audio (Results 1 – 25 of 5869) sorted by relevance


/external/webrtc/system_wrappers/source/
field_trial_unittest.cc
22 InitFieldTrialsFromString("Audio/Enabled/"); in TEST()
23 InitFieldTrialsFromString("Audio/Enabled/Video/Disabled/"); in TEST()
25 EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/")); in TEST()
26 EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/Video/Disabled/")); in TEST()
29 InitFieldTrialsFromString("Audio/Enabled/Audio/Enabled/"); in TEST()
30 InitFieldTrialsFromString("Audio/Enabled/B/C/Audio/Enabled/"); in TEST()
31 EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/Audio/Enabled/")); in TEST()
32 EXPECT_TRUE(FieldTrialsStringIsValid("Audio/Enabled/B/C/Audio/Enabled/")); in TEST()
37 RTC_EXPECT_DEATH(InitFieldTrialsFromString("Audio/EnabledVideo/Disabled/"), in TEST()
39 RTC_EXPECT_DEATH(InitFieldTrialsFromString("Audio/Enabled//Video/Disabled/"), in TEST()
[all …]
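The test above exercises WebRTC's field-trial string, which is simply a concatenation of "TrialName/GroupName/" pairs. A minimal sketch of setting and then querying trials, assuming the functions declared in system_wrappers/include/field_trial.h (exact signatures and lifetime rules vary between WebRTC revisions):

    #include <string>
    #include "system_wrappers/include/field_trial.h"

    void ConfigureTrials() {
      // Trial string format: "TrialName/GroupName/" pairs, back to back.
      // The string is expected to outlive all later lookups.
      static const char kTrials[] = "Audio/Enabled/Video/Disabled/";
      webrtc::field_trial::InitFieldTrialsFromString(kTrials);

      // Any component can later ask which group a trial resolved to.
      std::string audio_group = webrtc::field_trial::FindFullName("Audio");
      // audio_group == "Enabled" with the string above.
    }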
/external/ltp/testcases/kernel/device-drivers/v4l/user_space/
test_VIDIOC_ENUMAUDIO.c
41 struct v4l2_audio audio; in test_VIDIOC_ENUMAUDIO() local
47 memset(&audio, 0xff, sizeof(audio)); in test_VIDIOC_ENUMAUDIO()
48 audio.index = i; in test_VIDIOC_ENUMAUDIO()
49 ret_enum = ioctl(get_video_fd(), VIDIOC_ENUMAUDIO, &audio); in test_VIDIOC_ENUMAUDIO()
58 CU_ASSERT_EQUAL(audio.index, i); in test_VIDIOC_ENUMAUDIO()
60 CU_ASSERT(0 < strlen((char *)audio.name)); in test_VIDIOC_ENUMAUDIO()
62 ((char *)audio.name, sizeof(audio.name))); in test_VIDIOC_ENUMAUDIO()
64 //CU_ASSERT_EQUAL(audio.capability, ?); in test_VIDIOC_ENUMAUDIO()
65 //CU_ASSERT_EQUAL(audio.mode, ?); in test_VIDIOC_ENUMAUDIO()
66 CU_ASSERT_EQUAL(audio.reserved[0], 0); in test_VIDIOC_ENUMAUDIO()
[all …]
test_VIDIOC_AUDIO.c
67 struct v4l2_audio audio; in test_VIDIOC_G_AUDIO() local
70 memset(&audio, 0xff, sizeof(audio)); in test_VIDIOC_G_AUDIO()
71 ret_get = ioctl(get_video_fd(), VIDIOC_G_AUDIO, &audio); in test_VIDIOC_G_AUDIO()
80 //CU_ASSERT_EQUAL(audio.index, ?); in test_VIDIOC_G_AUDIO()
82 CU_ASSERT(0 < strlen((char *)audio.name)); in test_VIDIOC_G_AUDIO()
83 CU_ASSERT(valid_string((char *)audio.name, sizeof(audio.name))); in test_VIDIOC_G_AUDIO()
85 CU_ASSERT(valid_audio_capability(audio.capability)); in test_VIDIOC_G_AUDIO()
86 CU_ASSERT(valid_audio_mode(audio.mode)); in test_VIDIOC_G_AUDIO()
88 CU_ASSERT_EQUAL(audio.reserved[0], 0); in test_VIDIOC_G_AUDIO()
89 CU_ASSERT_EQUAL(audio.reserved[1], 0); in test_VIDIOC_G_AUDIO()
[all …]
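Both LTP cases revolve around the V4L2 audio ioctls. A rough, untested sketch of the same calls outside the test harness: VIDIOC_ENUMAUDIO walks the audio inputs by index until the driver rejects the index, and VIDIOC_G_AUDIO reports the currently selected input (the /dev/video0 path is an assumption):

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <linux/videodev2.h>
    #include <cstdio>
    #include <cstring>

    int main() {
      int fd = open("/dev/video0", O_RDWR);   // hypothetical device node
      if (fd < 0) { perror("open"); return 1; }

      // Enumerate audio inputs until the driver returns an error (EINVAL).
      for (__u32 i = 0; ; ++i) {
        v4l2_audio audio;
        std::memset(&audio, 0, sizeof(audio));
        audio.index = i;
        if (ioctl(fd, VIDIOC_ENUMAUDIO, &audio) < 0) break;
        std::printf("audio input %u: %s\n", audio.index,
                    reinterpret_cast<const char*>(audio.name));
      }

      // Query the currently selected audio input.
      v4l2_audio current;
      std::memset(&current, 0, sizeof(current));
      if (ioctl(fd, VIDIOC_G_AUDIO, &current) == 0) {
        std::printf("current input: %s (capability 0x%x, mode 0x%x)\n",
                    reinterpret_cast<const char*>(current.name),
                    current.capability, current.mode);
      }

      close(fd);
      return 0;
    }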
/external/libwebm/webm_parser/tests/
audio_parser_test.cc
15 using webm::Audio;
27 const Audio audio = parser_.value(); in TEST_F() local
29 EXPECT_FALSE(audio.sampling_frequency.is_present()); in TEST_F()
30 EXPECT_EQ(8000, audio.sampling_frequency.value()); in TEST_F()
32 EXPECT_FALSE(audio.output_frequency.is_present()); in TEST_F()
33 EXPECT_EQ(8000, audio.output_frequency.value()); in TEST_F()
35 EXPECT_FALSE(audio.channels.is_present()); in TEST_F()
36 EXPECT_EQ(static_cast<std::uint64_t>(1), audio.channels.value()); in TEST_F()
38 EXPECT_FALSE(audio.bit_depth.is_present()); in TEST_F()
39 EXPECT_EQ(static_cast<std::uint64_t>(0), audio.bit_depth.value()); in TEST_F()
[all …]
/external/autotest/metadata/tests/
audio.star
13 'audio/Aconnect',
14 suites = ['audio'],
18 'audio/ActiveStreamStress',
23 'audio/Aplay',
28 'audio/AudioBasicAssistant',
33 'audio/AudioBasicBluetoothPlayback',
38 'audio/AudioBasicBluetoothPlaybackRecord',
43 'audio/AudioBasicBluetoothRecord',
48 'audio/AudioBasicExternalMicrophone',
53 'audio/AudioBasicHDMI',
[all …]
/external/autotest/client/cros/chameleon/
audio_board.py
4 """This module provides the audio board interface."""
12 """AudioBoard is an abstraction of an audio board on a Chameleon board.
14 It provides methods to control the audio board.
37 """Gets an audio bus on this audio board.
47 """Gets an AudioJackPlugger on this audio board.
49 @returns: An AudioJackPlugger object if there is an audio jack plugger.
50 None if there is no audio jack plugger.
52 AudioJackPluggerException if there is no jack plugger on this audio
62 'There is no jack plugger on this audio board. Please '
69 """Gets an BluetoothController on this audio board.
[all …]
/external/webrtc/sdk/objc/native/src/
objc_audio_device.h
16 #import "components/audio/RTCAudioDevice.h"
35 // Retrieve the currently utilized audio layer
38 // Full-duplex transportation of PCM audio
62 // Audio transport initialization
70 // Audio transport control
78 // Audio mixer initialization
124 // Enables the built-in audio effects. Only supported on Android.
152 // Notifies `ObjCAudioDeviceModule` that at least one of the audio input
153 parameters or audio input latency of `RTCAudioDevice` has changed. It is necessary to
154 update `record_parameters_` with the current audio parameters of `RTCAudioDevice`
[all …]
/external/exoplayer/tree_8e57d3715f9092d5ec54ebe2e538f34bfcc34479/docs/doc/reference/com/google/android/exoplayer2/audio/
package-tree.html
5 <title>com.google.android.exoplayer2.audio Class Hierarchy (ExoPlayer library)</title>
22 … parent.document.title="com.google.android.exoplayer2.audio Class Hierarchy (ExoPlayer library)";
94 <h1 class="title">Hierarchy For Package com.google.android.exoplayer2.audio</h1>
106 …rcle">com.google.android.exoplayer2.audio.<a href="AacUtil.html" title="class in com.google.androi…
107 …e">com.google.android.exoplayer2.audio.<a href="AacUtil.Config.html" title="class in com.google.an…
108 …rcle">com.google.android.exoplayer2.audio.<a href="Ac3Util.html" title="class in com.google.androi…
109 …om.google.android.exoplayer2.audio.<a href="Ac3Util.SyncFrameInfo.html" title="class in com.google…
110 …rcle">com.google.android.exoplayer2.audio.<a href="Ac4Util.html" title="class in com.google.androi…
111 …om.google.android.exoplayer2.audio.<a href="Ac4Util.SyncFrameInfo.html" title="class in com.google…
112 …">com.google.android.exoplayer2.audio.<a href="AudioAttributes.html" title="class in com.google.an…
[all …]
package-summary.html
5 <title>com.google.android.exoplayer2.audio (ExoPlayer library)</title>
22 parent.document.title="com.google.android.exoplayer2.audio (ExoPlayer library)";
94 <h1 title="Package" class="title">Package&nbsp;com.google.android.exoplayer2.audio</h1>
107 …Receiver.Listener.html" title="interface in com.google.android.exoplayer2.audio">AudioCapabilities…
109 <div class="block">Listener notified when audio capabilities change.</div>
113 …f="AudioProcessor.html" title="interface in com.google.android.exoplayer2.audio">AudioProcessor</a…
115 <div class="block">Interface for audio processors, which take audio data as input and transform it,…
120 …ererEventListener.html" title="interface in com.google.android.exoplayer2.audio">AudioRendererEven…
122 <div class="block">Listener of audio <a href="../Renderer.html" title="interface in com.google.andr…
126 …a href="AudioSink.html" title="interface in com.google.android.exoplayer2.audio">AudioSink</a></th>
[all …]
/external/exoplayer/tree_15dc86382f17a24a3e881e52e31a810c1ea44b49/docs/doc/reference/com/google/android/exoplayer2/audio/
package-tree.html
5 <title>com.google.android.exoplayer2.audio Class Hierarchy (ExoPlayer library)</title>
22 … parent.document.title="com.google.android.exoplayer2.audio Class Hierarchy (ExoPlayer library)";
94 <h1 class="title">Hierarchy For Package com.google.android.exoplayer2.audio</h1>
106 …rcle">com.google.android.exoplayer2.audio.<a href="AacUtil.html" title="class in com.google.androi…
107 …e">com.google.android.exoplayer2.audio.<a href="AacUtil.Config.html" title="class in com.google.an…
108 …rcle">com.google.android.exoplayer2.audio.<a href="Ac3Util.html" title="class in com.google.androi…
109 …om.google.android.exoplayer2.audio.<a href="Ac3Util.SyncFrameInfo.html" title="class in com.google…
110 …rcle">com.google.android.exoplayer2.audio.<a href="Ac4Util.html" title="class in com.google.androi…
111 …om.google.android.exoplayer2.audio.<a href="Ac4Util.SyncFrameInfo.html" title="class in com.google…
112 …">com.google.android.exoplayer2.audio.<a href="AudioAttributes.html" title="class in com.google.an…
[all …]
/external/webrtc/test/pc/e2e/
peer_connection_e2e_smoke_test.cc
29 #include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
139 AudioConfig audio; in TEST_F() local
140 audio.stream_label = "alice-audio"; in TEST_F()
141 audio.mode = AudioConfig::Mode::kFile; in TEST_F()
142 audio.input_file_name = in TEST_F()
144 audio.sampling_frequency_in_hz = 48000; in TEST_F()
145 audio.sync_group = "alice-media"; in TEST_F()
146 alice->SetAudioConfig(std::move(audio)); in TEST_F()
161 AudioConfig audio; in TEST_F() local
162 audio.stream_label = "charlie-audio"; in TEST_F()
[all …]
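Pulling the fragments above together, each peer's audio in the smoke test is configured roughly as follows. This is a consolidation of the excerpt rather than standalone code: AudioConfig and the `alice` configurer come from WebRTC's PeerConnection E2E framework, and the input file path (elided in the result) is hypothetical:

    AudioConfig audio;
    audio.stream_label = "alice-audio";
    audio.mode = AudioConfig::Mode::kFile;              // feed audio from a file rather than a fake capture device
    audio.input_file_name = "alice_source_48000.wav";   // hypothetical path; the test uses a bundled resource
    audio.sampling_frequency_in_hz = 48000;
    audio.sync_group = "alice-media";                   // streams sharing a sync_group are analyzed as one synchronized media group
    alice->SetAudioConfig(std::move(audio));            // std::move as in the test; needs <utility>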
/external/python/cpython2/Doc/library/
al.rst
2 :mod:`al` --- Audio functions on the SGI
7 :synopsis: Audio functions on the SGI.
14 This module provides access to the audio facilities of the SGI Indy and Indigo
25 Symbolic constants from the C header file ``<audio.h>`` are defined in the
30 The current version of the audio library may dump core when bad argument values
44 :dfn:`audio port object`; methods of audio port objects are described below.
49 The return value is a new :dfn:`audio configuration object`; methods of audio
79 .. method:: audio configuration.getqueuesize()
84 .. method:: audio configuration.setqueuesize(size)
89 .. method:: audio configuration.getwidth()
[all …]
sunaudio.rst
2 :mod:`sunaudiodev` --- Access to Sun audio hardware
7 :synopsis: Access to Sun audio hardware.
17 This module allows you to access the Sun audio interface. The Sun audio hardware
18 is capable of recording and playing back audio data in u-LAW format with a
20 :manpage:`audio(7I)` manual page.
38 This function opens the audio device and returns a Sun audio device object. This
43 to open the device only for the activity needed. See :manpage:`audio(7I)` for
47 ``AUDIODEV`` for the base audio device filename. If not found, it falls back to
48 :file:`/dev/audio`. The control device is calculated by appending "ctl" to the
49 base audio device.
[all …]
/external/webrtc/modules/audio_coding/g3doc/
index.md
4 # The WebRTC Audio Coding Module
6 WebRTC audio coding module can handle both audio sending and receiving. Folder
9 * Audio Sending: Audio frames, each of which should always contain 10 ms worth
10 of data, are provided to the audio coding module through
11 [`Add10MsData()`][Add10MsData]. The audio coding module uses a provided
12 audio encoder to encode audio frames and deliver the data to a
13 pre-registered audio packetization callback, which is supposed to wrap the
14 encoded audio into RTP packets and send them over a transport. Built-in
15 audio codecs are included in the [`codecs`][codecs] folder. The
16 [audio network adaptor][ANA] provides an add-on functionality to an audio
[all …]
/external/flac/test/cuesheets/
good.001.cue
5 TRACK 01 AUDIO
8 TRACK 02 AUDIO
111 TRACK 03 AUDIO
115 TRACK 04 AUDIO
118 TRACK 05 AUDIO
121 TRACK 06 AUDIO
124 TRACK 07 AUDIO
127 TRACK 08 AUDIO
129 TRACK 09 AUDIO
132 TRACK 10 AUDIO
[all …]
/external/ltp/testcases/kernel/device-drivers/v4l/user_space/doc/spec/
x341.htm
5 >Audio Inputs and Outputs</TITLE
76 NAME="AUDIO"
77 >1.5. Audio Inputs and Outputs</A
80 >Audio inputs and outputs are physical connectors of a
82 outputs, zero or more each. Radio devices have no audio inputs or
90 > an audio source, but this API associates
100 audio signal to a sound card is not considered an audio output.</P
102 >Audio and video inputs and outputs are associated. Selecting
103 a video source also selects an audio source. This is most evident when
104 the video and audio source is a tuner. Further audio connectors can
[all …]
/external/oboe/docs/
OpenSLESMigration.md
6 …ode from [OpenSL ES for Android](https://developer.android.com/ndk/guides/audio/opensl/opensl-for-…
13 …th an audio device capable of playing or recording audio samples. They also use a callback mechani…
17 …to reduce the amount of boilerplate code and guesswork associated with recording and playing audio.
25 OpenSL uses an audio engine object, created using `slCreateEngine`, to create other objects. Oboe's…
27 OpenSL uses audio player and audio recorder objects to communicate with audio devices. In Oboe an `…
29 …audio callback mechanism is a user-defined function which is called each time a buffer is enqueued…
42 <td>Audio engine (an <code>SLObjectItf</code>)
48 <td>Audio player
54 <td>Audio recorder
75 This is a container array which you can read audio data from when recording, or write data into whe…
[all …]
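For comparison with the OpenSL ES flow described above, a minimal Oboe output-stream sketch; it assumes a recent Oboe release (older releases use AudioStreamCallback/setCallback rather than AudioStreamDataCallback/setDataCallback, and openStream also has a raw-pointer overload):

    #include <algorithm>
    #include <memory>
    #include <oboe/Oboe.h>

    // Renders silence; a real app would fill audioData with samples.
    class SilenceCallback : public oboe::AudioStreamDataCallback {
     public:
      oboe::DataCallbackResult onAudioReady(oboe::AudioStream* stream,
                                            void* audioData,
                                            int32_t numFrames) override {
        auto* out = static_cast<float*>(audioData);
        std::fill(out, out + numFrames * stream->getChannelCount(), 0.0f);
        return oboe::DataCallbackResult::Continue;
      }
    };

    void StartPlayback() {
      static SilenceCallback callback;          // must outlive the stream
      std::shared_ptr<oboe::AudioStream> stream;
      oboe::AudioStreamBuilder builder;
      builder.setDirection(oboe::Direction::Output)
          ->setPerformanceMode(oboe::PerformanceMode::LowLatency)
          ->setSharingMode(oboe::SharingMode::Exclusive)
          ->setFormat(oboe::AudioFormat::Float)
          ->setChannelCount(oboe::ChannelCount::Stereo)
          ->setDataCallback(&callback);
      if (builder.openStream(stream) == oboe::Result::OK) {
        stream->requestStart();                 // onAudioReady now fires once per audio burst
      }
    }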
/external/tensorflow/tensorflow/lite/g3doc/inference_with_metadata/task_library/
audio_classifier.md
1 # Integrate audio classifiers
3 Audio classification is a common use case of Machine Learning to classify the
6 The Task Library `AudioClassifier` API can be used to deploy your custom audio
11 * Input audio processing, e.g. converting PCM 16 bit encoding to PCM
12 Float encoding and the manipulation of the audio ring buffer.
26 ## Supported audio classifier models
32 …[TensorFlow Lite Model Maker for Audio Classification](https://www.tensorflow.org/lite/api_docs/py…
35 …[pretrained audio event classification models on TensorFlow Hub](https://tfhub.dev/google/lite-mod…
43 [Audio Classification reference app](https://github.com/tensorflow/examples/tree/master/lite/exampl…
65 // Import the Audio Task Library dependency (NNAPI is included)
[all …]
/external/python/cpython3/Lib/test/
mime.types
266 # atx: audio/ATRAC-X
828 # stm: audio/x-stm
936 application/vnd.yamaha.smaf-audio saf
966 # mod: audio/x-mod
977 audio/1d-interleaved-parityfec
978 audio/32kadpcm 726
980 audio/3gpp
982 audio/3gpp2
983 audio/ac3 ac3
984 audio/AMR amr
[all …]
/external/webrtc/modules/audio_device/android/
opensles_player.h
30 // Implements 16-bit mono PCM audio output support for Android using the
35 // will RTC_DCHECK if any method is called on an invalid thread. Decoded audio
44 // garbage collection pauses and it supports reduced audio output latency.
54 // configuration provided via the audio manager at construction.
86 // Reads audio data in PCM format using the AudioDeviceBuffer.
88 // internal audio thread while output streaming is active.
89 // If the `silence` flag is set, the audio is filled with zeros instead of
90 // asking the WebRTC layer for real audio data. This procedure is also known
91 // as audio priming.
94 // Allocate memory for audio buffers which will be used to render audio
[all …]
/external/autotest/client/cros/multimedia/
audio_extension_handler.py
5 """Handler for audio extension functionality."""
18 """Wrapper around test extension that uses chrome.audio API to get audio
32 """Checks chrome.audio is available.
39 "chrome.audio") != None),
42 raise AudioExtensionHandlerError('chrome.audio is not available.')
47 """Gets whether the chrome.audio is available."""
48 return self._extension.EvaluateJavaScript("chrome.audio") != None
53 """Gets the audio device info from Chrome audio API.
69 The filter param defaults to {}, requests all available audio
76 The unique identifier of the audio device.
[all …]
/external/tensorflow/tensorflow/lite/g3doc/android/tutorials/
audio_classification.md
4 learning models to recognize sounds and spoken words in an Android app. Audio
8 ![Audio recognition animated demo](https://storage.googleapis.com/download.tensorflow.org/tflite/ex…
13 [Task Library for Audio](https://www.tensorflow.org/lite/inference_with_metadata/task_library/audio…
14 which handles most of the audio data recording and preprocessing. For more
15 information on how audio is pre-processed for use with machine learning models,
17 [Audio Data Preparation and Augmentation](https://www.tensorflow.org/io/tutorials/audio).
19 ## Audio classification with machine learning
22 audio samples recorded with a microphone on an Android device. The example app
30 models run predictions on audio clips that contain 15600 individual samples per
89 [enable audio input](https://developer.android.com/studio/releases/emulator#29.0.6-host-audio)
[all …]
/external/vboot_reference/firmware/lib/
vboot_audio.c
62 static void VbGetDevMusicNotes(VbAudioContext *audio, int use_short) in VbGetDevMusicNotes() argument
85 if (!audio->background_beep) in VbGetDevMusicNotes()
192 audio->music_notes = notebuf; in VbGetDevMusicNotes()
193 audio->note_count = count; in VbGetDevMusicNotes()
194 audio->free_notes_when_done = 1; in VbGetDevMusicNotes()
200 audio->music_notes = builtin; in VbGetDevMusicNotes()
201 audio->note_count = count; in VbGetDevMusicNotes()
202 audio->free_notes_when_done = 0; in VbGetDevMusicNotes()
212 VbAudioContext *audio = &au; in VbAudioOpen() local
218 /* Calibrate audio delay */ in VbAudioOpen()
[all …]
/external/googleapis/google/cloud/speech/v1/
cloud_speech.proto
42 // Performs synchronous speech recognition: receive results after all audio
49 option (google.api.method_signature) = "config,audio";
64 option (google.api.method_signature) = "config,audio";
72 // sending audio. This method is only available via the gRPC API (not REST).
83 // Required. The audio data to be recognized.
84 RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; field
94 // Required. The audio data to be recognized.
95 RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; field
118 // The streaming request, which is either a streaming config or audio content.
125 // The audio data to be recognized. Sequential chunks of audio data are sent
[all …]
/external/googleapis/google/cloud/speech/v1p1beta1/
cloud_speech.proto
42 // Performs synchronous speech recognition: receive results after all audio
49 option (google.api.method_signature) = "config,audio";
64 option (google.api.method_signature) = "config,audio";
72 // sending audio. This method is only available via the gRPC API (not REST).
83 // Required. The audio data to be recognized.
84 RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; field
94 // Required. The audio data to be recognized.
95 RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; field
118 // The streaming request, which is either a streaming config or audio content.
125 // The audio data to be recognized. Sequential chunks of audio data are sent
[all …]
